From 4200ae4189fa0103373a21b67e6ada587765dd7d Mon Sep 17 00:00:00 2001
From: Alan Orth
Date: Wed, 7 Dec 2022 22:59:37 +0100
Subject: [PATCH] Add notes for 2022-12-07

---
 content/posts/2022-12.md | 30 ++++++++++++++-
 docs/2015-11/index.html | 2 +-
 docs/2015-12/index.html | 2 +-
 docs/2016-01/index.html | 2 +-
 docs/2016-02/index.html | 2 +-
 docs/2016-03/index.html | 2 +-
 docs/2016-04/index.html | 2 +-
 docs/2016-05/index.html | 2 +-
 docs/2016-06/index.html | 2 +-
 docs/2016-07/index.html | 2 +-
 docs/2016-08/index.html | 2 +-
 docs/2016-09/index.html | 2 +-
 docs/2016-10/index.html | 2 +-
 docs/2016-11/index.html | 2 +-
 docs/2016-12/index.html | 2 +-
 docs/2017-01/index.html | 2 +-
 docs/2017-02/index.html | 2 +-
 docs/2017-03/index.html | 2 +-
 docs/2017-04/index.html | 2 +-
 docs/2017-05/index.html | 2 +-
 docs/2017-06/index.html | 2 +-
 docs/2017-07/index.html | 2 +-
 docs/2017-08/index.html | 2 +-
 docs/2017-09/index.html | 2 +-
 docs/2017-10/index.html | 2 +-
 docs/2017-11/index.html | 2 +-
 docs/2017-12/index.html | 2 +-
 docs/2018-01/index.html | 2 +-
 docs/2018-02/index.html | 2 +-
 docs/2018-03/index.html | 2 +-
 docs/2018-04/index.html | 2 +-
 docs/2018-05/index.html | 2 +-
 docs/2018-06/index.html | 2 +-
 docs/2018-07/index.html | 2 +-
 docs/2018-08/index.html | 2 +-
 docs/2018-09/index.html | 2 +-
 docs/2018-10/index.html | 2 +-
 docs/2018-11/index.html | 2 +-
 docs/2018-12/index.html | 2 +-
 docs/2019-01/index.html | 2 +-
 docs/2019-02/index.html | 2 +-
 docs/2019-03/index.html | 2 +-
 docs/2019-04/index.html | 2 +-
 docs/2019-05/index.html | 2 +-
 docs/2019-06/index.html | 2 +-
 docs/2019-07/index.html | 2 +-
 docs/2019-08/index.html | 2 +-
 docs/2019-09/index.html | 2 +-
 docs/2019-10/index.html | 2 +-
 docs/2019-11/index.html | 2 +-
 docs/2019-12/index.html | 2 +-
 docs/2020-01/index.html | 2 +-
 docs/2020-02/index.html | 2 +-
 docs/2020-03/index.html | 2 +-
 docs/2020-04/index.html | 2 +-
 docs/2020-05/index.html | 2 +-
 docs/2020-06/index.html | 2 +-
 docs/2020-07/index.html | 2 +-
 docs/2020-08/index.html | 2 +-
 docs/2020-09/index.html | 2 +-
 docs/2020-10/index.html | 2 +-
 docs/2020-11/index.html | 2 +-
 docs/2020-12/index.html | 2 +-
 docs/2021-01/index.html | 2 +-
 docs/2021-02/index.html | 2 +-
 docs/2021-03/index.html | 2 +-
 docs/2021-04/index.html | 2 +-
 docs/2021-05/index.html | 2 +-
 docs/2021-06/index.html | 2 +-
 docs/2021-07/index.html | 2 +-
 docs/2021-08/index.html | 2 +-
 docs/2021-09/index.html | 2 +-
 docs/2021-10/index.html | 2 +-
 docs/2021-11/index.html | 2 +-
 docs/2021-12/index.html | 2 +-
 docs/2022-01/index.html | 2 +-
 docs/2022-02/index.html | 2 +-
 docs/2022-03/index.html | 2 +-
 docs/2022-04/index.html | 2 +-
 docs/2022-05/index.html | 2 +-
 docs/2022-06/index.html | 2 +-
 docs/2022-07/index.html | 2 +-
 docs/2022-08/index.html | 2 +-
 docs/2022-09/index.html | 2 +-
 docs/2022-10/index.html | 2 +-
 docs/2022-11/index.html | 2 +-
 docs/2022-12/index.html | 45 +++++++++++++++++++---
 docs/404.html | 2 +-
 docs/categories/index.html | 4 +-
 docs/categories/notes/index.html | 4 +-
 docs/categories/notes/page/2/index.html | 4 +-
 docs/categories/notes/page/3/index.html | 4 +-
 docs/categories/notes/page/4/index.html | 4 +-
 docs/categories/notes/page/5/index.html | 4 +-
 docs/categories/notes/page/6/index.html | 4 +-
 docs/categories/notes/page/7/index.html | 4 +-
 docs/cgiar-library-migration/index.html | 2 +-
 docs/cgspace-cgcorev2-migration/index.html | 2 +-
 docs/cgspace-dspace6-upgrade/index.html | 2 +-
 docs/index.html | 4 +-
 docs/page/2/index.html | 4 +-
 docs/page/3/index.html | 4 +-
 docs/page/4/index.html | 4 +-
 docs/page/5/index.html | 4 +-
 docs/page/6/index.html | 4 +-
 docs/page/7/index.html | 4 +-
 docs/page/8/index.html | 4 +-
 docs/page/9/index.html | 4 +-
 docs/posts/index.html | 4 +-
 docs/posts/page/2/index.html | 4 +-
 docs/posts/page/3/index.html | 4 +-
 docs/posts/page/4/index.html | 4 +-
 docs/posts/page/5/index.html | 4 +-
 docs/posts/page/6/index.html | 4 +-
 docs/posts/page/7/index.html | 4 +-
 docs/posts/page/8/index.html | 4 +-
 docs/posts/page/9/index.html | 4 +-
 docs/sitemap.xml | 10 ++---
 docs/tags/index.html | 2 +-
 docs/tags/migration/index.html | 2 +-
 docs/tags/notes/index.html | 2 +-
 docs/tags/notes/page/2/index.html | 2 +-
 docs/tags/notes/page/3/index.html | 2 +-
 123 files changed, 218 insertions(+), 159 deletions(-)

diff --git a/content/posts/2022-12.md b/content/posts/2022-12.md
index e2c5e1825..1a9955cbd 100644
--- a/content/posts/2022-12.md
+++ b/content/posts/2022-12.md
@@ -46,7 +46,7 @@ $ wc -l ~/Downloads/2022-12-03-CLARISA-institutions.txt
 $ sed -e 's_ / _\n_' -e 's_/_\n_' -e 's/ \?(.*)$//' ~/Downloads/2022-12-03-CLARISA-institutions.txt > ~/Downloads/2022-12-03-CLARISA-institutions-alan.txt
 ```
 
-- I checked CGSpace's top 1,000 institutions too, first exporting from PostgreSQL:
+- I checked CGSpace's top 1,000 affiliations too, first exporting from PostgreSQL:
 
 ```console
 localhost/dspacetest= ☘ \COPY (SELECT DISTINCT text_value as "cg.contributor.affiliation", count(*) FROM metadatavalue WHERE dspace_object_id IN (SELECT uuid FROM item) AND metadata_field_id = 211 GROUP BY text_value ORDER BY count DESC LIMIT 1000) to /tmp/2022-11-22-affiliations.csv;
@@ -61,6 +61,32 @@ $ csvgrep -c matched -m true /tmp/cgspace-matches.csv | wc -l
 542
 ```
 
-- So that's a 54% match for our top institutions
+- So that's a 54% match for our top affiliations
+- I realized we should actually check affiliations and sponsors, since those are stored in separate fields
+  - When I add those the matches go down a bit to 45%
+- Oh man, I realized institutions like `Université d'Abomey Calavi` don't match in ROR because they are like this in the JSON:
+
+```console
+"name": "Universit\u00e9 d'Abomey-Calavi"
+```
+
+- So we likely match a bunch more than 50%...
+- I exported a list of affiliations and donors from CGSpace for Peter to look over and send corrections
+
+## 2022-12-05
+
+- First day of PRMS technical workshop in Rome
+- Last night I submitted a CSV import with changes to 1,500 Alliance items (adding regions) and it hadn't completed after twenty-four hours so I canceled it
+  - Not sure if there is some rollback that will happen or what state the database will be in, so I will wait a few hours to see what happens before trying to modify those items again
+  - I started it again a few hours later with a subset of the items and 4GB of RAM instead of 2
+  - It completed successfully...
+
+## 2022-12-07
+
+- I found a bug in my csv-metadata-quality script regarding the regions
+  - I was accidentally checking `cg.coverage.subregion` due to a sloppy regex
+  - This means I've added a few thousand UN M.49 regions to the `cg.coverage.subregion` field in the last few days
+  - I had to extract them from CGSpace and delete them using `delete-metadata-values.py`
+- My [DSpace 7.x pull request to tell ImageMagick about the PDF CropBox](https://github.com/DSpace/DSpace/pull/8550) was merged
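A quick note on the ROR name-matching issue above: `\u00e9` in the ROR JSON dump is only a JSON string escape, so it decodes to a real `é`, and the remaining difference for `Université d'Abomey Calavi` is just the hyphen. A minimal sketch of the decode-and-normalize idea, in Python with illustrative sample strings (this is not the actual matching code in the CSV tooling):

```python
import json
import unicodedata

def normalize(name: str) -> str:
    """Unicode-normalize and unify hyphens/case so near-identical names compare equal."""
    name = unicodedata.normalize("NFC", name)
    return name.replace("-", " ").casefold()

# The ROR dump stores names with JSON escapes; json.loads() decodes them to real characters
ror_name = json.loads('"Universit\\u00e9 d\'Abomey-Calavi"')  # -> Université d'Abomey-Calavi
cgspace_name = "Université d'Abomey Calavi"

print(normalize(ror_name) == normalize(cgspace_name))  # True
```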
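On the stalled Alliance CSV import: the retry that succeeded used 4GB of heap instead of 2. Assuming the import was run with DSpace's command-line metadata importer (the CSV path here is illustrative), the heap can be raised via `JAVA_OPTS` before invoking the launcher:

```console
$ export JAVA_OPTS="-Xmx4096m -Dfile.encoding=UTF-8"
$ dspace metadata-import -f /tmp/alliance-regions.csv
```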
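On the csv-metadata-quality bug: the exact pattern isn't reproduced in these notes, but the failure mode is an unanchored regex on the column name, so a check intended for `cg.coverage.region` also fires on `cg.coverage.subregion`. A hedged illustration of the shape of the bug and the fix (anchoring the pattern), not the script's actual code:

```python
import re

columns = ["cg.coverage.region", "cg.coverage.subregion"]

sloppy = re.compile(r"region$")                 # unanchored: matches both column names
strict = re.compile(r"^cg\.coverage\.region$")  # anchored: matches only the region field

print([c for c in columns if sloppy.search(c)])  # ['cg.coverage.region', 'cg.coverage.subregion']
print([c for c in columns if strict.match(c)])   # ['cg.coverage.region']
```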
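The cleanup of those wrongly-added subregions follows the same `delete-metadata-values.py` pattern used for regions in the 2019 notes; roughly like the following, where the CSV path is illustrative and `-m` must be the actual `metadata_field_id` for `cg.coverage.subregion` from the metadatafieldregistry (not shown in these notes):

```console
$ # look up the metadata_field_id for cg.coverage.subregion first; the value below is a placeholder
$ ./ilri/delete-metadata-values.py -i /tmp/2022-12-07-delete-subregions.csv -db dspace -u dspace -p 'fuuu' -m XXX -f cg.coverage.subregion -d
```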
diff --git a/docs/2022-12/index.html b/docs/2022-12/index.html
index e1c991fac..d66a011ae 100644
--- a/docs/2022-12/index.html
+++ b/docs/2022-12/index.html
@@ -46,9 +46,9 @@
     "@type": "BlogPosting",
     "headline": "December, 2022",
     "url": "https://alanorth.github.io/cgspace-notes/2022-12/",
-    "wordCount": "376",
+    "wordCount": "617",
     "datePublished": "2022-12-01T08:52:36+03:00",
-    "dateModified": "2022-12-03T10:46:29+03:00",
+    "dateModified": "2022-12-04T03:19:49+03:00",
     "author": {
         "@type": "Person",
         "name": "Alan Orth"
@@ -164,7 +164,7 @@ Replace “East Asia” with “Eastern Asia” region on CGSpace (UN M.49 region)
$ sed -e 's_ / _\n_' -e 's_/_\n_' -e 's/ \?(.*)$//' ~/Downloads/2022-12-03-CLARISA-institutions.txt > ~/Downloads/2022-12-03-CLARISA-institutions-alan.txt
 
localhost/dspacetest= ☘ \COPY (SELECT DISTINCT text_value as "cg.contributor.affiliation", count(*) FROM metadatavalue WHERE dspace_object_id IN (SELECT uuid FROM item) AND metadata_field_id = 211 GROUP BY text_value ORDER BY count DESC LIMIT 1000) to /tmp/2022-11-22-affiliations.csv;