From d5214f02e1eab030958382cba18c7e94b3e98aac Mon Sep 17 00:00:00 2001
From: Alan Orth
Date: Thu, 9 Feb 2023 08:50:54 +0300
Subject: [PATCH] Add notes for 2023-02-08
---
content/posts/2023-02.md | 90 +++++++
docs/2015-11/index.html | 6 +-
docs/2015-12/index.html | 6 +-
docs/2016-01/index.html | 6 +-
docs/2016-02/index.html | 6 +-
docs/2016-03/index.html | 6 +-
docs/2016-04/index.html | 6 +-
docs/2016-05/index.html | 6 +-
docs/2016-06/index.html | 6 +-
docs/2016-07/index.html | 6 +-
docs/2016-08/index.html | 6 +-
docs/2016-09/index.html | 6 +-
docs/2016-10/index.html | 6 +-
docs/2016-11/index.html | 6 +-
docs/2016-12/index.html | 6 +-
docs/2017-01/index.html | 6 +-
docs/2017-02/index.html | 6 +-
docs/2017-03/index.html | 6 +-
docs/2017-04/index.html | 6 +-
docs/2017-05/index.html | 6 +-
docs/2017-06/index.html | 6 +-
docs/2017-07/index.html | 6 +-
docs/2017-08/index.html | 6 +-
docs/2017-09/index.html | 6 +-
docs/2017-10/index.html | 6 +-
docs/2017-11/index.html | 6 +-
docs/2017-12/index.html | 6 +-
docs/2018-01/index.html | 6 +-
docs/2018-02/index.html | 6 +-
docs/2018-03/index.html | 6 +-
docs/2018-04/index.html | 6 +-
docs/2018-05/index.html | 6 +-
docs/2018-06/index.html | 6 +-
docs/2018-07/index.html | 6 +-
docs/2018-08/index.html | 6 +-
docs/2018-09/index.html | 6 +-
docs/2018-10/index.html | 6 +-
docs/2018-11/index.html | 6 +-
docs/2018-12/index.html | 6 +-
docs/2019-01/index.html | 6 +-
docs/2019-02/index.html | 6 +-
docs/2019-03/index.html | 6 +-
docs/2019-04/index.html | 6 +-
docs/2019-05/index.html | 6 +-
docs/2019-06/index.html | 6 +-
docs/2019-07/index.html | 6 +-
docs/2019-08/index.html | 6 +-
docs/2019-09/index.html | 6 +-
docs/2019-10/index.html | 6 +-
docs/2019-11/index.html | 6 +-
docs/2019-12/index.html | 6 +-
docs/2020-01/index.html | 6 +-
docs/2020-02/index.html | 6 +-
docs/2020-03/index.html | 6 +-
docs/2020-04/index.html | 6 +-
docs/2020-05/index.html | 6 +-
docs/2020-06/index.html | 6 +-
docs/2020-07/index.html | 6 +-
docs/2020-08/index.html | 6 +-
docs/2020-09/index.html | 6 +-
docs/2020-10/index.html | 6 +-
docs/2020-11/index.html | 6 +-
docs/2020-12/index.html | 6 +-
docs/2021-01/index.html | 6 +-
docs/2021-02/index.html | 6 +-
docs/2021-03/index.html | 6 +-
docs/2021-04/index.html | 6 +-
docs/2021-05/index.html | 6 +-
docs/2021-06/index.html | 6 +-
docs/2021-07/index.html | 6 +-
docs/2021-08/index.html | 6 +-
docs/2021-09/index.html | 6 +-
docs/2021-10/index.html | 6 +-
docs/2021-11/index.html | 6 +-
docs/2021-12/index.html | 6 +-
docs/2022-01/index.html | 6 +-
docs/2022-02/index.html | 6 +-
docs/2022-03/index.html | 6 +-
docs/2022-04/index.html | 6 +-
docs/2022-05/index.html | 6 +-
docs/2022-06/index.html | 6 +-
docs/2022-07/index.html | 6 +-
docs/2022-08/index.html | 6 +-
docs/2022-09/index.html | 6 +-
docs/2022-10/index.html | 6 +-
docs/2022-11/index.html | 6 +-
docs/2022-12/index.html | 6 +-
docs/2023-01/index.html | 10 +-
docs/2023-02/index.html | 285 +++++++++++++++++++++
docs/404.html | 6 +-
docs/categories/index.html | 10 +-
docs/categories/index.xml | 4 +-
docs/categories/notes/index.html | 51 ++--
docs/categories/notes/index.xml | 18 +-
docs/categories/notes/page/2/index.html | 53 ++--
docs/categories/notes/page/3/index.html | 66 +++--
docs/categories/notes/page/4/index.html | 71 +++--
docs/categories/notes/page/5/index.html | 85 +++---
docs/categories/notes/page/6/index.html | 75 ++++--
docs/categories/notes/page/7/index.html | 30 ++-
docs/cgiar-library-migration/index.html | 6 +-
docs/cgspace-cgcorev2-migration/index.html | 6 +-
docs/cgspace-dspace6-upgrade/index.html | 6 +-
docs/index.html | 53 ++--
docs/index.xml | 18 +-
docs/page/10/index.html | 199 ++++++++++++++
docs/page/2/index.html | 55 ++--
docs/page/3/index.html | 68 +++--
docs/page/4/index.html | 73 +++---
docs/page/5/index.html | 87 +++----
docs/page/6/index.html | 77 ++++--
docs/page/7/index.html | 59 ++---
docs/page/8/index.html | 63 ++---
docs/page/9/index.html | 66 ++---
docs/posts/index.html | 53 ++--
docs/posts/index.xml | 18 +-
docs/posts/page/10/index.html | 199 ++++++++++++++
docs/posts/page/2/index.html | 55 ++--
docs/posts/page/3/index.html | 68 +++--
docs/posts/page/4/index.html | 73 +++---
docs/posts/page/5/index.html | 87 +++----
docs/posts/page/6/index.html | 77 ++++--
docs/posts/page/7/index.html | 59 ++---
docs/posts/page/8/index.html | 63 ++---
docs/posts/page/9/index.html | 66 ++---
docs/robots.txt | 3 +-
docs/sitemap.xml | 15 +-
docs/tags/index.html | 6 +-
docs/tags/migration/index.html | 6 +-
docs/tags/notes/index.html | 6 +-
docs/tags/notes/page/2/index.html | 6 +-
docs/tags/notes/page/3/index.html | 6 +-
132 files changed, 1960 insertions(+), 1112 deletions(-)
create mode 100644 content/posts/2023-02.md
create mode 100644 docs/2023-02/index.html
create mode 100644 docs/page/10/index.html
create mode 100644 docs/posts/page/10/index.html
diff --git a/content/posts/2023-02.md b/content/posts/2023-02.md
new file mode 100644
index 000000000..e1a09bbae
--- /dev/null
+++ b/content/posts/2023-02.md
@@ -0,0 +1,90 @@
+---
+title: "February, 2023"
+date: 2023-02-01T10:57:36+03:00
+author: "Alan Orth"
+categories: ["Notes"]
+---
+
+## 2023-02-01
+
+- Export CGSpace to cross-check the DOI metadata with Crossref
+ - I want to try to expand my use of their data to journals, publishers, volumes, issues, etc...
+
+<!--more-->
+
+- First, extract a list of DOIs for use with `crossref-doi-lookup.py`:
+
+```console
+$ csvcut -c 'cg.identifier.doi[en_US]' ~/Downloads/2023-02-01-cgspace.csv \
+ | csvgrep -c 1 -m 'doi.org' \
+ | csvgrep -c 1 -m ' ' -i \
+ | csvgrep -c 1 -r '.*cifor.*' -i \
+ | sed 1d > /tmp/2023-02-01-dois.txt
+$ ./ilri/crossref-doi-lookup.py -e a.orth@cgiar.org -i /tmp/2023-02-01-dois.txt -o ~/Downloads/2023-02-01-crossref-results.csv -d
+```
+
+- Then extract the ID, DOI, journal, volume, issue, publisher, etc. from the CGSpace dump and rename the `cg.identifier.doi[en_US]` field to `doi` so we can join on it with the Crossref results file:
+
+```console
+$ csvcut -c 'id,cg.identifier.doi[en_US],cg.journal[en_US],cg.volume[en_US],cg.issue[en_US],dcterms.publisher[en_US],cg.number[en_US],dcterms.license[en_US]' ~/Downloads/2023-02-01-cgspace.csv \
+ | csvgrep -c 'cg.identifier.doi[en_US]' -r '.*cifor.*' -i \
+ | sed -e '1s/cg.identifier.doi\[en_US\]/doi/' \
+ -e 's_https://doi.org/__g' \
+ -e 's_https://dx.doi.org/__g' \
+ > /tmp/2023-02-01-cgspace-doi-metadata.csv
+$ csvjoin -c doi /tmp/2023-02-01-cgspace-doi-metadata.csv ~/Downloads/2023-02-01-crossref-results.csv > /tmp/2023-02-01-cgspace-crossref-check.csv
+```
+
+- Then import the joined file into OpenRefine for analysis and cleaning
+- I just noticed that Crossref also has types, so we could use that in the future too!
+- After examining the results manually I made a few corrections, but I didn't identify any patterns I could use for automatic matching or cleaning
+
+## 2023-02-05
+
+- Normalize `text_lang` attributes in PostgreSQL, run a quick Discovery index, and then export CGSpace to check Initiative mappings and countries/regions (a sketch of the normalization is below)
+- Run all system updates on CGSpace (linode18) and reboot it
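+
+- A minimal sketch of that normalization, assuming the usual DSpace 6 schema (the psql connection details are placeholders, and the exact `text_lang` values needing fixes differ each month):
+
+```console
+$ psql -h localhost -U dspace dspace -c "UPDATE metadatavalue SET text_lang='en_US' WHERE dspace_object_id IN (SELECT uuid FROM item) AND (text_lang IN ('en', '') OR text_lang IS NULL);"
+$ dspace index-discovery
+```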
+
+## 2023-02-06
+
+- Peter said that a new Initiative was approved last month so we need to add it to CGSpace: `Fragility, Conflict, and Migration`
+- There has been a lot of discussion with Enrico and IFPRI about the "issue date" versus the "available date", after lots of feedback from the PRMS QA
+ - I filed [an issue on CG Core to propose using `dcterms.available` as an optional field to indicate the online date](https://github.com/AgriculturalSemantics/cg-core/issues/43)
+
+## 2023-02-07
+
+- IFPRI's web developer Tony managed to get his Drupal harvester to have a useful user agent:
+
+```console
+54.x.x.x - - [06/Feb/2023:10:10:32 +0100] "POST /rest/items/find-by-metadata-field?limit=%22100&offset=0 HTTP/1.1" 200 58855 "-" "IFPRI drupal POST harvester"
+```
+
+- He also noticed that there is no pagination on POST requests to `/rest/items/find-by-metadata-field`, and that he needs to increase his timeout for requests that return 100+ results, i.e.:
+
+```console
+curl -f -H "Content-Type: application/json" -X POST "https://dspacetest.cgiar.org/rest/items/find-by-metadata-field" -d '{"key":"cg.subject.actionArea", "value":"Systems Transformation", "language": "en_US"}'
+```
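+
+- Testing one of the slower queries from the command line with a longer client timeout looks roughly like this (curl's `--max-time`; the 300-second value is arbitrary):
+
+```console
+$ curl -f -m 300 -H "Content-Type: application/json" -X POST "https://dspacetest.cgiar.org/rest/items/find-by-metadata-field" -d '{"key":"cg.subject.actionArea", "value":"Systems Transformation", "language": "en_US"}'
+```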
+
+- I need to ask on the DSpace Slack about this POST pagination
+- Abenet and Udana noticed that the Handle server was not running
+ - Looking in the `error.log` file I see that the service is complaining about a lock file being present
+ - This is because Linode had to do emergency maintenance on the VM host this morning and the Handle server didn't shut down properly
+- I'm having an issue with `poetry update` so I spent some time debugging and filed [an issue](https://github.com/python-poetry/poetry/issues/7482)
+- Proof and import nine items for IFPRI's Digital Innovation Initiative
+ - There were only some minor issues in the metadata
+  - I also did a duplicate check with `check-duplicates.py` just in case (a sketch of the commands is after this list)
+- I did some minor updates on csv-metadata-quality
+ - First, to reduce warnings on non-SPDX licenses like "Copyrighted; all rights reserved" and "Other" since they are very common for us and I'm sick of seeing the warnings
+  - Second, to skip whitespace and newline fixes on the abstract field, since they are often intentional
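+
+- The duplicate check and the import were roughly the following (a sketch: the collection handle, paths, and credentials are placeholders, and the flags reflect how these tools are normally invoked rather than a verbatim transcript):
+
+```console
+$ ./ilri/check-duplicates.py -i /tmp/ifpri-digital-innovation.csv -db dspace -u dspace -p 'xxxxx' -o /tmp/ifpri-duplicates.csv
+$ dspace import --add --eperson=aorth@example.com --collection=10568/xxxxx --source=/tmp/SimpleArchiveFormat --mapfile=/tmp/ifpri.map
+```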
+
+## 2023-02-08
+
+- Make some edits to IFPRI records requested by Jawoo and Leigh
+- Help Alessandra upload a last-minute report for SAPLING
+- Proof and upload twenty-seven IFPRI records to CGSpace
+ - It's a good thing I did a duplicate check because I found three duplicates!
+- Export CGSpace to update Initiative mappings and country/region mappings
+ - Then start a harvest on AReS
+
+
diff --git a/docs/2015-11/index.html b/docs/2015-11/index.html
index 4263c0fe6..d9440ef3a 100644
--- a/docs/2015-11/index.html
+++ b/docs/2015-11/index.html
@@ -34,7 +34,7 @@ Last week I had increased the limit from 30 to 60, which seemed to help, but now
$ psql -c 'SELECT * from pg_stat_activity;' | grep idle | grep -c cgspace
78
"/>
-
+
@@ -242,6 +242,8 @@ db.statementpool = true
+
-
diff --git a/docs/2016-01/index.html b/docs/2016-01/index.html
index 41f1c54f8..47ec9e435 100644
--- a/docs/2016-01/index.html
+++ b/docs/2016-01/index.html
@@ -28,7 +28,7 @@ Move ILRI collection 10568/12503 from 10568/27869 to 10568/27629 using the move_
I realized it is only necessary to clear the Cocoon cache after moving collections—rather than reindexing—as no metadata has changed, and therefore no search or browse indexes need to be updated.
Update GitHub wiki for documentation of maintenance tasks.
"/>
-
+
@@ -200,6 +200,8 @@ $ find SimpleArchiveForBio/ -iname “*.pdf” -exec basename {} ; | sor
+
-
diff --git a/docs/2016-02/index.html b/docs/2016-02/index.html
index 914b505e4..80b2ba083 100644
--- a/docs/2016-02/index.html
+++ b/docs/2016-02/index.html
@@ -38,7 +38,7 @@ I noticed we have a very interesting list of countries on CGSpace:
Not only are there 49,000 countries, we have some blanks (25)…
Also, lots of things like “COTE D`LVOIRE” and “COTE D IVOIRE”
"/>
-
+
@@ -378,6 +378,8 @@ Bitstream: tést señora alimentación.pdf
+
-
diff --git a/docs/2016-03/index.html b/docs/2016-03/index.html
index e842656f5..fe2b51c31 100644
--- a/docs/2016-03/index.html
+++ b/docs/2016-03/index.html
@@ -28,7 +28,7 @@ Looking at issues with author authorities on CGSpace
For some reason we still have the index-lucene-update cron job active on CGSpace, but I’m pretty sure we don’t need it as of the latest few versions of Atmire’s Listings and Reports module
Reinstall my local (Mac OS X) DSpace stack with Tomcat 7, PostgreSQL 9.3, and Java JDK 1.7 to match environment on CGSpace server
"/>
-
+
@@ -316,6 +316,8 @@ Reinstall my local (Mac OS X) DSpace stack with Tomcat 7, PostgreSQL 9.3, and Ja
+
-
diff --git a/docs/2016-04/index.html b/docs/2016-04/index.html
index 0d795af56..d0ef8ce39 100644
--- a/docs/2016-04/index.html
+++ b/docs/2016-04/index.html
@@ -32,7 +32,7 @@ After running DSpace for over five years I’ve never needed to look in any
This will save us a few gigs of backup space we’re paying for on S3
Also, I noticed the checker log has some errors we should pay attention to:
"/>
-
+
@@ -495,6 +495,8 @@ dspace.log.2016-04-27:7271
+
-
diff --git a/docs/2016-06/index.html b/docs/2016-06/index.html
index 565fba7dd..f8aaac3c9 100644
--- a/docs/2016-06/index.html
+++ b/docs/2016-06/index.html
@@ -34,7 +34,7 @@ This is their publications set: http://ebrary.ifpri.org/oai/oai.php?verb=ListRec
You can see the others by using the OAI ListSets verb: http://ebrary.ifpri.org/oai/oai.php?verb=ListSets
Working on second phase of metadata migration, looks like this will work for moving CPWF-specific data in dc.identifier.fund to cg.identifier.cpwfproject and then the rest to dc.description.sponsorship
"/>
-
+
@@ -409,6 +409,8 @@ $ ./delete-metadata-values.py -f dc.contributor.corporate -i Corporate-Authors-D
+
-
diff --git a/docs/2016-07/index.html b/docs/2016-07/index.html
index e60cfa081..a79f6d99d 100644
--- a/docs/2016-07/index.html
+++ b/docs/2016-07/index.html
@@ -44,7 +44,7 @@ dspacetest=# select text_value from metadatavalue where metadata_field_id=3 and
In this case the select query was showing 95 results before the update
"/>
-
+
@@ -325,6 +325,8 @@ discovery.index.authority.ignore-variants=true
+
-
diff --git a/docs/2016-12/index.html b/docs/2016-12/index.html
index f633af3dc..baad332eb 100644
--- a/docs/2016-12/index.html
+++ b/docs/2016-12/index.html
@@ -46,7 +46,7 @@ I see thousands of them in the logs for the last few months, so it’s not r
I’ve raised a ticket with Atmire to ask
Another worrying error from dspace.log is:
"/>
-
+
@@ -784,6 +784,8 @@ $ exit
+
-
diff --git a/docs/2017-01/index.html b/docs/2017-01/index.html
index a1ce263be..0dde9d54c 100644
--- a/docs/2017-01/index.html
+++ b/docs/2017-01/index.html
@@ -28,7 +28,7 @@ I checked to see if the Solr sharding task that is supposed to run on January 1s
I tested on DSpace Test as well and it doesn’t work there either
I asked on the dspace-tech mailing list because it seems to be broken, and actually now I’m not sure if we’ve ever had the sharding task run successfully over all these years
"/>
-
+
@@ -369,6 +369,8 @@ $ gs -sDEVICE=pdfwrite -dCompatibilityLevel=1.4 -dPDFSETTINGS=/ebook -dNOPAUSE -
+
-
diff --git a/docs/2017-02/index.html b/docs/2017-02/index.html
index b066d90ba..1212aa846 100644
--- a/docs/2017-02/index.html
+++ b/docs/2017-02/index.html
@@ -50,7 +50,7 @@ DELETE 1
Create issue on GitHub to track the addition of CCAFS Phase II project tags (#301)
Looks like we’ll be using cg.identifier.ccafsprojectpii as the field name
"/>
-
+
@@ -423,6 +423,8 @@ COPY 1968
+
-
diff --git a/docs/2017-07/index.html b/docs/2017-07/index.html
index 7f42b2082..9b23b95e8 100644
--- a/docs/2017-07/index.html
+++ b/docs/2017-07/index.html
@@ -36,7 +36,7 @@ Merge changes for WLE Phase II theme rename (#329)
Looking at extracting the metadata registries from ICARDA’s MEL DSpace database so we can compare fields with CGSpace
We can use PostgreSQL’s extended output format (-x) plus sed to format the output into quasi XML:
"/>
-
+
@@ -275,6 +275,8 @@ delete from metadatavalue where resource_type_id=2 and metadata_field_id=235 and
+
-
diff --git a/docs/2017-08/index.html b/docs/2017-08/index.html
index 7671b77bd..058fd9e21 100644
--- a/docs/2017-08/index.html
+++ b/docs/2017-08/index.html
@@ -60,7 +60,7 @@ This was due to newline characters in the dc.description.abstract column, which
I exported a new CSV from the collection on DSpace Test and then manually removed the characters in vim using g/^$/d
Then I cleaned up the author authorities and HTML characters in OpenRefine and sent the file back to Abenet
"/>
-
+
@@ -517,6 +517,8 @@ org.apache.commons.dbcp.SQLNestedException: Cannot get a connection, pool error
+
-
diff --git a/docs/2017-09/index.html b/docs/2017-09/index.html
index f31defabc..5f0fba4fe 100644
--- a/docs/2017-09/index.html
+++ b/docs/2017-09/index.html
@@ -32,7 +32,7 @@ Linode sent an alert that CGSpace (linode18) was using 261% CPU for the past two
Ask Sisay to clean up the WLE approvers a bit, as Marianne’s user account is both in the approvers step as well as the group
"/>
-
+
@@ -659,6 +659,8 @@ Cert Status: good
+
-
diff --git a/docs/2017-10/index.html b/docs/2017-10/index.html
index 5c2c02cc4..3bef734f9 100644
--- a/docs/2017-10/index.html
+++ b/docs/2017-10/index.html
@@ -34,7 +34,7 @@ http://hdl.handle.net/10568/78495||http://hdl.handle.net/10568/79336
There appears to be a pattern but I’ll have to look a bit closer and try to clean them up automatically, either in SQL or in OpenRefine
Add Katherine Lutz to the groups for content submission and edit steps of the CGIAR System collections
"/>
-
+
@@ -443,6 +443,8 @@ session_id=6C30F10B4351A4ED83EC6ED50AFD6B6A
+
-
diff --git a/docs/2017-11/index.html b/docs/2017-11/index.html
index cec9ce47a..97aa866ca 100644
--- a/docs/2017-11/index.html
+++ b/docs/2017-11/index.html
@@ -48,7 +48,7 @@ Generate list of authors on CGSpace for Peter to go through and correct:
dspace=# \copy (select distinct text_value, count(*) as count from metadatavalue where metadata_field_id = (select metadata_field_id from metadatafieldregistry where element = 'contributor' and qualifier = 'author') AND resource_type_id = 2 group by text_value order by count desc) to /tmp/authors.csv with csv;
COPY 54701
"/>
-
+
@@ -944,6 +944,8 @@ $ cat dspace.log.2017-11-28 | grep -o -E 'session_id=[A-Z0-9]{32}' | sor
+
-
diff --git a/docs/2017-12/index.html b/docs/2017-12/index.html
index 5a0a4fec7..e2de8ea71 100644
--- a/docs/2017-12/index.html
+++ b/docs/2017-12/index.html
@@ -30,7 +30,7 @@ The logs say “Timeout waiting for idle object”
PostgreSQL activity says there are 115 connections currently
The list of connections to XMLUI and REST API for today:
"/>
-
+
@@ -783,6 +783,8 @@ DELETE 20
+
-
diff --git a/docs/2018-01/index.html b/docs/2018-01/index.html
index f37aaa58c..0fd55120f 100644
--- a/docs/2018-01/index.html
+++ b/docs/2018-01/index.html
@@ -150,7 +150,7 @@ dspace.log.2018-01-02:34
Danny wrote to ask for help renewing the wildcard ilri.org certificate and I advised that we should probably use Let’s Encrypt if it’s just a handful of domains
"/>
-
+
@@ -1452,6 +1452,8 @@ Catalina:type=Manager,context=/,host=localhost activeSessions 8
+
-
diff --git a/docs/2018-02/index.html b/docs/2018-02/index.html
index adf2567ed..c7002f49e 100644
--- a/docs/2018-02/index.html
+++ b/docs/2018-02/index.html
@@ -30,7 +30,7 @@ We don’t need to distinguish between internal and external works, so that
Yesterday I figured out how to monitor DSpace sessions using JMX
I copied the logic in the jmx_tomcat_dbpools provided by Ubuntu’s munin-plugins-java package and used the stuff I discovered about JMX in 2018-01
"/>
-
+
@@ -1038,6 +1038,8 @@ UPDATE 3
+
-
diff --git a/docs/2018-03/index.html b/docs/2018-03/index.html
index b6632b194..d317c103e 100644
--- a/docs/2018-03/index.html
+++ b/docs/2018-03/index.html
@@ -24,7 +24,7 @@ Export a CSV of the IITA community metadata for Martin Mueller
Export a CSV of the IITA community metadata for Martin Mueller
"/>
-
+
@@ -585,6 +585,8 @@ Fixed 5 occurences of: GENEBANKS
+
-
diff --git a/docs/2018-04/index.html b/docs/2018-04/index.html
index 40057e508..5784becea 100644
--- a/docs/2018-04/index.html
+++ b/docs/2018-04/index.html
@@ -26,7 +26,7 @@ Catalina logs at least show some memory errors yesterday:
I tried to test something on DSpace Test but noticed that it’s down since god knows when
Catalina logs at least show some memory errors yesterday:
"/>
-
+
@@ -594,6 +594,8 @@ $ pg_restore -O -U dspacetest -d dspacetest -W -h localhost /tmp/dspace_2018-04-
+
-
diff --git a/docs/2018-05/index.html b/docs/2018-05/index.html
index 336f5daf8..0d55622b5 100644
--- a/docs/2018-05/index.html
+++ b/docs/2018-05/index.html
@@ -38,7 +38,7 @@ http://localhost:3000/solr/statistics/update?stream.body=%3Ccommit/%3E
Then I reduced the JVM heap size from 6144 back to 5120m
Also, I switched it to use OpenJDK instead of Oracle Java, as well as re-worked the Ansible infrastructure scripts to support hosts choosing which distribution they want to use
"/>
-
+
@@ -523,6 +523,8 @@ $ psql -h localhost -U postgres dspacetest
+
-
diff --git a/docs/2018-07/index.html b/docs/2018-07/index.html
index edf4c4089..1bd7eef0f 100644
--- a/docs/2018-07/index.html
+++ b/docs/2018-07/index.html
@@ -36,7 +36,7 @@ During the mvn package stage on the 5.8 branch I kept getting issues with java r
There is insufficient memory for the Java Runtime Environment to continue.
"/>
-
+
@@ -569,6 +569,8 @@ dspace=# select count(text_value) from metadatavalue where resource_type_id=2 an
+
-
diff --git a/docs/2018-08/index.html b/docs/2018-08/index.html
index fde4c95cd..49933d29a 100644
--- a/docs/2018-08/index.html
+++ b/docs/2018-08/index.html
@@ -46,7 +46,7 @@ Anyways, perhaps I should increase the JVM heap from 5120m to 6144m like we did
The server only has 8GB of RAM so we’ll eventually need to upgrade to a larger one because we’ll start starving the OS, PostgreSQL, and command line batch processes
I ran all system updates on DSpace Test and rebooted it
"/>
-
+
@@ -442,6 +442,8 @@ $ dspace database migrate ignored
+
-
diff --git a/docs/2018-09/index.html b/docs/2018-09/index.html
index ba35e00ad..06fc6be13 100644
--- a/docs/2018-09/index.html
+++ b/docs/2018-09/index.html
@@ -30,7 +30,7 @@ I’ll update the DSpace role in our Ansible infrastructure playbooks and ru
Also, I’ll re-run the postgresql tasks because the custom PostgreSQL variables are dynamic according to the system’s RAM, and we never re-ran them after migrating to larger Linodes last month
I’m testing the new DSpace 5.8 branch in my Ubuntu 18.04 environment and I’m getting those autowire errors in Tomcat 8.5.30 again:
"/>
-
+
@@ -748,6 +748,8 @@ UPDATE metadatavalue SET text_value='ja' WHERE resource_type_id=2 AND me
+
-
diff --git a/docs/2018-10/index.html b/docs/2018-10/index.html
index fe1a0bdf9..4cb093c12 100644
--- a/docs/2018-10/index.html
+++ b/docs/2018-10/index.html
@@ -26,7 +26,7 @@ I created a GitHub issue to track this #389, because I’m super busy in Nai
Phil Thornton got an ORCID identifier so we need to add it to the list on CGSpace and tag his existing items
I created a GitHub issue to track this #389, because I’m super busy in Nairobi right now
"/>
-
+
@@ -656,6 +656,8 @@ $ curl -X GET -H "Content-Type: application/json" -H "Accept: applic
+
-
diff --git a/docs/2018-11/index.html b/docs/2018-11/index.html
index 691213302..f043a381b 100644
--- a/docs/2018-11/index.html
+++ b/docs/2018-11/index.html
@@ -36,7 +36,7 @@ Send a note about my dspace-statistics-api to the dspace-tech mailing list
Linode has been sending mails a few times a day recently that CGSpace (linode18) has had high CPU usage
Today these are the top 10 IPs:
"/>
-
+
@@ -553,6 +553,8 @@ $ dspace dsrun org.dspace.eperson.Groomer -a -b 11/27/2016 -d
+
-
diff --git a/docs/2018-12/index.html b/docs/2018-12/index.html
index 1b20cc955..161825d10 100644
--- a/docs/2018-12/index.html
+++ b/docs/2018-12/index.html
@@ -36,7 +36,7 @@ Then I ran all system updates and restarted the server
I noticed that there is another issue with PDF thumbnails on CGSpace, and I see there was another Ghostscript vulnerability last week
"/>
-
+
@@ -594,6 +594,8 @@ UPDATE 1
+
-
diff --git a/docs/2019-01/index.html b/docs/2019-01/index.html
index 8afc327bb..9ed7ea897 100644
--- a/docs/2019-01/index.html
+++ b/docs/2019-01/index.html
@@ -50,7 +50,7 @@ I don’t see anything interesting in the web server logs around that time t
357 207.46.13.1
903 54.70.40.11
"/>
-
+
@@ -1264,6 +1264,8 @@ identify: CorruptImageProfile `xmp' @ warning/profile.c/SetImageProfileInter
+
-
diff --git a/docs/2019-03/index.html b/docs/2019-03/index.html
index 2409e81ab..2714d34b2 100644
--- a/docs/2019-03/index.html
+++ b/docs/2019-03/index.html
@@ -46,7 +46,7 @@ Most worryingly, there are encoding errors in the abstracts for eleven items, fo
I think I will need to ask Udana to re-copy and paste the abstracts with more care using Google Docs
"/>
-
+
@@ -1208,6 +1208,8 @@ sys 0m2.551s
+
-
diff --git a/docs/2019-05/index.html b/docs/2019-05/index.html
index 39fc36ebd..5a021f365 100644
--- a/docs/2019-05/index.html
+++ b/docs/2019-05/index.html
@@ -48,7 +48,7 @@ DELETE 1
But after this I tried to delete the item from the XMLUI and it is still present…
"/>
-
+
@@ -631,6 +631,8 @@ COPY 64871
+
-
diff --git a/docs/2019-06/index.html b/docs/2019-06/index.html
index 008593975..fcf1ed7d0 100644
--- a/docs/2019-06/index.html
+++ b/docs/2019-06/index.html
@@ -34,7 +34,7 @@ Run system updates on CGSpace (linode18) and reboot it
Skype with Marie-Angélique and Abenet about CG Core v2
"/>
-
+
@@ -317,6 +317,8 @@ UPDATE 2
+
-
diff --git a/docs/2019-07/index.html b/docs/2019-07/index.html
index 47b2969f8..47dff9176 100644
--- a/docs/2019-07/index.html
+++ b/docs/2019-07/index.html
@@ -38,7 +38,7 @@ CGSpace
Abenet had another similar issue a few days ago when trying to find the stats for 2018 in the RTB community
"/>
-
+
@@ -554,6 +554,8 @@ issn.validate('1020-3362')
+
-
diff --git a/docs/2019-08/index.html b/docs/2019-08/index.html
index 4fdbbd237..a752d7932 100644
--- a/docs/2019-08/index.html
+++ b/docs/2019-08/index.html
@@ -46,7 +46,7 @@ After rebooting, all statistics cores were loaded… wow, that’s luck
Run system updates on DSpace Test (linode19) and reboot it
"/>
-
+
@@ -573,6 +573,8 @@ sys 2m27.496s
+
-
diff --git a/docs/2019-09/index.html b/docs/2019-09/index.html
index f3028e598..7c9df1505 100644
--- a/docs/2019-09/index.html
+++ b/docs/2019-09/index.html
@@ -72,7 +72,7 @@ Here are the top ten IPs in the nginx XMLUI and REST/OAI logs this morning:
7249 2a01:7e00::f03c:91ff:fe18:7396
9124 45.5.186.2
"/>
-
+
@@ -581,6 +581,8 @@ $ csv-metadata-quality -i /tmp/clarisa-institutions.csv -o /tmp/clarisa-institut
+
-
diff --git a/docs/2019-12/index.html b/docs/2019-12/index.html
index 163ce4d1c..40e9baba0 100644
--- a/docs/2019-12/index.html
+++ b/docs/2019-12/index.html
@@ -46,7 +46,7 @@ Make sure all packages are up to date and the package manager is up to date, the
# dpkg -C
# reboot
"/>
-
+
@@ -404,6 +404,8 @@ UPDATE 1
+
-
diff --git a/docs/2020-04/index.html b/docs/2020-04/index.html
index f4051d0cc..4d3e97d84 100644
--- a/docs/2020-04/index.html
+++ b/docs/2020-04/index.html
@@ -48,7 +48,7 @@ The third item now has a donut with score 1 since I tweeted it last week
On the same note, the one item Abenet pointed out last week now has a donut with score of 104 after I tweeted it last week
"/>
-
+
@@ -658,6 +658,8 @@ $ psql -c 'select * from pg_stat_activity' | wc -l
+
-
diff --git a/docs/2020-05/index.html b/docs/2020-05/index.html
index a948e8068..812025ec5 100644
--- a/docs/2020-05/index.html
+++ b/docs/2020-05/index.html
@@ -34,7 +34,7 @@ I see that CGSpace (linode18) is still using PostgreSQL JDBC driver version 42.2
"/>
-
+
@@ -477,6 +477,8 @@ Caused by: java.lang.NullPointerException
+
-
diff --git a/docs/2020-06/index.html b/docs/2020-06/index.html
index ac70623b3..50f8418bf 100644
--- a/docs/2020-06/index.html
+++ b/docs/2020-06/index.html
@@ -36,7 +36,7 @@ I sent Atmire the dspace.log from today and told them to log into the server to
In other news, I checked the statistics API on DSpace 6 and it’s working
I tried to build the OAI registry on the freshly migrated DSpace 6 on DSpace Test and I get an error:
"/>
-
+
@@ -811,6 +811,8 @@ $ csvcut -c 'id,cg.subject.ilri[],cg.subject.ilri[en_US],dc.subject[en_US]
+
-
diff --git a/docs/2020-07/index.html b/docs/2020-07/index.html
index aed77d1de..bcb3d09b1 100644
--- a/docs/2020-07/index.html
+++ b/docs/2020-07/index.html
@@ -38,7 +38,7 @@ I restarted Tomcat and PostgreSQL and the issue was gone
Since I was restarting Tomcat anyways I decided to redeploy the latest changes from the 5_x-prod branch and I added a note about COVID-19 items to the CGSpace frontpage at Peter’s request
"/>
-
+
@@ -1142,6 +1142,8 @@ Fixed 4 occurences of: Muloi, D.M.
+
-
diff --git a/docs/2020-08/index.html b/docs/2020-08/index.html
index 98f5d685d..573f9a8a4 100644
--- a/docs/2020-08/index.html
+++ b/docs/2020-08/index.html
@@ -36,7 +36,7 @@ It is class based so I can easily add support for other vocabularies, and the te
"/>
-
+
@@ -798,6 +798,8 @@ $ grep -c added /tmp/2020-08-27-countrycodetagger.log
+
-
diff --git a/docs/2020-09/index.html b/docs/2020-09/index.html
index bd5942929..95422c09b 100644
--- a/docs/2020-09/index.html
+++ b/docs/2020-09/index.html
@@ -48,7 +48,7 @@ I filed a bug on OpenRXV: https://github.com/ilri/OpenRXV/issues/39
I filed an issue on OpenRXV to make some minor edits to the admin UI: https://github.com/ilri/OpenRXV/issues/40
"/>
-
+
@@ -717,6 +717,8 @@ solr_query_params = {
+
-
diff --git a/docs/2020-11/index.html b/docs/2020-11/index.html
index 923a77af3..7e63eb63e 100644
--- a/docs/2020-11/index.html
+++ b/docs/2020-11/index.html
@@ -32,7 +32,7 @@ So far we’ve spent at least fifty hours to process the statistics and stat
"/>
-
+
@@ -731,6 +731,8 @@ $ ./fix-metadata-values.py -i 2020-11-30-fix-hung-orcid.csv -db dspace63 -u dspa
+
-
diff --git a/docs/2021-01/index.html b/docs/2021-01/index.html
index 0da61f378..c75abbe3c 100644
--- a/docs/2021-01/index.html
+++ b/docs/2021-01/index.html
@@ -50,7 +50,7 @@ For example, this item has 51 views on CGSpace, but 0 on AReS
"/>
-
+
@@ -688,6 +688,8 @@ java.lang.IllegalArgumentException: Invalid character found in the request targe
+
-
diff --git a/docs/2021-03/index.html b/docs/2021-03/index.html
index 42cf27d1b..e8dfd43e3 100644
--- a/docs/2021-03/index.html
+++ b/docs/2021-03/index.html
@@ -34,7 +34,7 @@ Also, we found some issues building and running OpenRXV currently due to ecosyst
"/>
-
+
@@ -875,6 +875,8 @@ Also, we found some issues building and running OpenRXV currently due to ecosyst
+
-
diff --git a/docs/2021-04/index.html b/docs/2021-04/index.html
index 753950fd0..aecdb9ef2 100644
--- a/docs/2021-04/index.html
+++ b/docs/2021-04/index.html
@@ -44,7 +44,7 @@ Perhaps one of the containers crashed, I should have looked closer but I was in
"/>
-
+
@@ -1042,6 +1042,8 @@ $ chrt -b 0 dspace dsrun com.atmire.statistics.util.update.atomic.AtomicStatisti
+
-
diff --git a/docs/2021-05/index.html b/docs/2021-05/index.html
index ac3ae1645..2a86ef21d 100644
--- a/docs/2021-05/index.html
+++ b/docs/2021-05/index.html
@@ -36,7 +36,7 @@ I looked at the top user agents and IPs in the Solr statistics for last month an
I will add the RI/1.0 pattern to our DSpace agents overload and purge them from Solr (we had previously seen this agent with 9,000 hits or so in 2020-09), but I think I will leave the Microsoft Word one… as that’s an actual user…
"/>
-
+
@@ -685,6 +685,8 @@ May 26, 02:57 UTC
+
-
diff --git a/docs/2021-06/index.html b/docs/2021-06/index.html
index a8da9a039..600a076bd 100644
--- a/docs/2021-06/index.html
+++ b/docs/2021-06/index.html
@@ -36,7 +36,7 @@ I simply started it and AReS was running again:
"/>
-
+
@@ -693,6 +693,8 @@ I simply started it and AReS was running again:
+
-
diff --git a/docs/2021-07/index.html b/docs/2021-07/index.html
index 6aeb3705c..56d3cf696 100644
--- a/docs/2021-07/index.html
+++ b/docs/2021-07/index.html
@@ -30,7 +30,7 @@ Export another list of ALL subjects on CGSpace, including AGROVOC and non-AGROVO
localhost/dspace63= > \COPY (SELECT DISTINCT LOWER(text_value) AS subject, count(*) FROM metadatavalue WHERE dspace_object_id in (SELECT dspace_object_id FROM item) AND metadata_field_id IN (119, 120, 127, 122, 128, 125, 135, 203, 208, 210, 215, 123, 236, 242, 187) GROUP BY subject ORDER BY count DESC) to /tmp/2021-07-01-all-subjects.csv WITH CSV HEADER;
COPY 20994
"/>
-
+
@@ -715,6 +715,8 @@ COPY 20994
+
-
diff --git a/docs/2021-08/index.html b/docs/2021-08/index.html
index fadd8db4b..6de09940e 100644
--- a/docs/2021-08/index.html
+++ b/docs/2021-08/index.html
@@ -32,7 +32,7 @@ Update Docker images on AReS server (linode20) and reboot the server:
I decided to upgrade linode20 from Ubuntu 18.04 to 20.04
"/>
-
+
@@ -606,6 +606,8 @@ I decided to upgrade linode20 from Ubuntu 18.04 to 20.04
+
-
diff --git a/docs/2021-09/index.html b/docs/2021-09/index.html
index 36c069bae..f92c36c4c 100644
--- a/docs/2021-09/index.html
+++ b/docs/2021-09/index.html
@@ -48,7 +48,7 @@ The syntax Moayad showed me last month doesn’t seem to honor the search qu
"/>
-
+
@@ -588,6 +588,8 @@ The syntax Moayad showed me last month doesn’t seem to honor the search qu
+
-
diff --git a/docs/2021-10/index.html b/docs/2021-10/index.html
index 18d46590b..984bf9661 100644
--- a/docs/2021-10/index.html
+++ b/docs/2021-10/index.html
@@ -46,7 +46,7 @@ $ wc -l /tmp/2021-10-01-affiliations.txt
So we have 1879/7100 (26.46%) matching already
"/>
-
+
@@ -791,6 +791,8 @@ Try doing it in two imports. In first import, remove all authors. In second impo
+
-
diff --git a/docs/2021-12/index.html b/docs/2021-12/index.html
index 75a8306be..6afba2638 100644
--- a/docs/2021-12/index.html
+++ b/docs/2021-12/index.html
@@ -40,7 +40,7 @@ Purging 455 hits from WhatsApp in statistics
Total number of bot hits purged: 3679
"/>
-
+
@@ -577,6 +577,8 @@ Total number of bot hits purged: 3679
+
-
diff --git a/docs/2022-01/index.html b/docs/2022-01/index.html
index e0b9df9a1..39408e66c 100644
--- a/docs/2022-01/index.html
+++ b/docs/2022-01/index.html
@@ -24,7 +24,7 @@ Start a full harvest on AReS
Start a full harvest on AReS
"/>
-
+
@@ -380,6 +380,8 @@ Start a full harvest on AReS
+
-
diff --git a/docs/2022-02/index.html b/docs/2022-02/index.html
index f4cc15ffd..ece12fe4b 100644
--- a/docs/2022-02/index.html
+++ b/docs/2022-02/index.html
@@ -38,7 +38,7 @@ We agreed to try to do more alignment of affiliations/funders with ROR
"/>
-
+
@@ -724,6 +724,8 @@ isNotNull(value.match('699'))
+
-
diff --git a/docs/2022-05/index.html b/docs/2022-05/index.html
index ea6209e39..a33a9e629 100644
--- a/docs/2022-05/index.html
+++ b/docs/2022-05/index.html
@@ -66,7 +66,7 @@ If I query Solr for time:2022-04* AND dns:*msnbot* AND dns:*.msn.com. I see a ha
I purged 93,974 hits from these IPs using my check-spider-ip-hits.sh script
"/>
-
+
@@ -445,6 +445,8 @@ I purged 93,974 hits from these IPs using my check-spider-ip-hits.sh script
+
-
diff --git a/docs/2022-06/index.html b/docs/2022-06/index.html
index 0e895747d..a02127c5e 100644
--- a/docs/2022-06/index.html
+++ b/docs/2022-06/index.html
@@ -48,7 +48,7 @@ There seem to be many more of these:
"/>
-
+
@@ -458,6 +458,8 @@ There seem to be many more of these:
+
-
diff --git a/docs/2022-07/index.html b/docs/2022-07/index.html
index 71d0cef8e..1f1d63fb9 100644
--- a/docs/2022-07/index.html
+++ b/docs/2022-07/index.html
@@ -34,7 +34,7 @@ Also, the trgm functions I’ve used before are case insensitive, but Levens
"/>
-
+
@@ -736,6 +736,8 @@ Also, the trgm functions I’ve used before are case insensitive, but Levens
+
-
diff --git a/docs/2022-08/index.html b/docs/2022-08/index.html
index b95110952..28dd0e4d9 100644
--- a/docs/2022-08/index.html
+++ b/docs/2022-08/index.html
@@ -24,7 +24,7 @@ Our request to add CC-BY-3.0-IGO to SPDX was approved a few weeks ago
Our request to add CC-BY-3.0-IGO to SPDX was approved a few weeks ago
"/>
-
+
@@ -522,6 +522,8 @@ Our request to add CC-BY-3.0-IGO to SPDX was approved a few weeks ago
+
-
diff --git a/docs/2022-09/index.html b/docs/2022-09/index.html
index 0426e622b..1d9a39112 100644
--- a/docs/2022-09/index.html
+++ b/docs/2022-09/index.html
@@ -46,7 +46,7 @@ I also fixed a few bugs and improved the region-matching logic
"/>
-
+
@@ -783,6 +783,8 @@ harvesting of meat from wildlife and not from livestock.
-
diff --git a/docs/2022-10/index.html b/docs/2022-10/index.html
index 453973c8f..b9f151651 100644
--- a/docs/2022-10/index.html
+++ b/docs/2022-10/index.html
@@ -36,7 +36,7 @@ I filed an issue to ask about Java 11+ support
"/>
-
+
@@ -978,6 +978,8 @@ I filed an issue to ask about Java 11+ support
+
-
diff --git a/docs/2022-11/index.html b/docs/2022-11/index.html
index 7f849e35c..4b3350c16 100644
--- a/docs/2022-11/index.html
+++ b/docs/2022-11/index.html
@@ -44,7 +44,7 @@ I want to make sure they use groups instead of individuals where possible!
I reverted the Cocoon autosave change because it was more of a nuissance that Peter can’t upload CSVs from the web interface and is a very low severity security issue
"/>
-
+
@@ -757,6 +757,8 @@ I reverted the Cocoon autosave change because it was more of a nuissance that Pe
+
-
diff --git a/docs/2022-12/index.html b/docs/2022-12/index.html
index c3b1a893a..54115ab2d 100644
--- a/docs/2022-12/index.html
+++ b/docs/2022-12/index.html
@@ -36,7 +36,7 @@ I exported the CCAFS and IITA communities, extracted just the country and region
Add a few more authors to my CSV with author names and ORCID identifiers and tag 283 items!
Replace “East Asia” with “Eastern Asia” region on CGSpace (UN M.49 region)
"/>
-
+
@@ -577,6 +577,8 @@ Replace “East Asia” with “Eastern Asia” region on CGSpac
+
-
diff --git a/docs/2023-01/index.html b/docs/2023-01/index.html
index 47fd7101e..b6140a326 100644
--- a/docs/2023-01/index.html
+++ b/docs/2023-01/index.html
@@ -19,7 +19,7 @@ I see we have some new ones that aren’t in our list if I combine with this
-
+
@@ -34,7 +34,7 @@ I see we have some new ones that aren’t in our list if I combine with this
"/>
-
+
@@ -46,7 +46,7 @@ I see we have some new ones that aren’t in our list if I combine with this
"url": "https://alanorth.github.io/cgspace-notes/2023-01/",
"wordCount": "4361",
"datePublished": "2023-01-01T08:44:36+03:00",
- "dateModified": "2023-01-29T18:19:31+03:00",
+ "dateModified": "2023-01-31T22:20:38+03:00",
"author": {
"@type": "Person",
"name": "Alan Orth"
@@ -827,6 +827,8 @@ I see we have some new ones that aren’t in our list if I combine with this
+