mirror of https://github.com/ilri/dspace-statistics-api.git synced 2025-05-11 15:46:06 +02:00

Compare commits


10 Commits

Author SHA1 Message Date
87dbb6c4df CHANGELOG.md: Release version 0.2.1 2018-09-25 02:21:44 +03:00
3160c44566 app.py: Remove comment
This comment was added when I first began the application; the testing
status is now documented in the README.
2018-09-25 02:20:51 +03:00
4b72f626d9 Update string substitution format
Instead of numbering the placeholders I will just rely on the argument
order, at least to be consistent.
2018-09-25 02:19:29 +03:00
2d3b7620e3 CHANGELOG.md: Add note about psycopg2.extras.DictCursor 2018-09-25 02:08:54 +03:00
6e4bc630f7 database.py: Use psycopg2.extras.DictCursor
This allows us to access records using their column name. I didn't
notice that this was not working, as I had been testing the wrong
server!

See: http://initd.org/psycopg/docs/extras.html
2018-09-25 02:06:29 +03:00
44884140e5 CHANGELOG.md: Add new unreleased changes 2018-09-25 01:11:37 +03:00
74ff86ee3b contrib: Update environment settings in system units 2018-09-25 01:10:14 +03:00
3327884f21 Update docs to remove SQLite stuff
I've decided to use PostgreSQL instead of SQLite because UPSERT
support is available in the versions of PostgreSQL we're already
running, whereas SQLite needs a VERY new version (3.24.0) that is not
available on any recent long-term support Ubuntu release.
2018-09-25 00:56:01 +03:00
8f7450f67a Use PostgreSQL instead of SQLite
I was very surprised how easy, fast, and robust SQLite was, but in
the end I realized that its UPSERT support only arrived in version
3.24 and both Ubuntu 16.04 and 18.04 ship older versions than that! I
did manage to install libsqlite3-0 from Ubuntu 18.10 cosmic on my
xenial host, but that feels dirty.

PostgreSQL has supported UPSERT since 9.5 (see the sketch after this
commit list), not to mention the same nice LIMIT and OFFSET clauses.
2018-09-25 00:49:47 +03:00
28d61fb041 README.md: Add notes about Python and SQLite versions 2018-09-24 17:26:48 +03:00
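
The UPSERT that motivated the switch is PostgreSQL's INSERT ... ON CONFLICT clause, available since 9.5. A minimal sketch of how it behaves, assuming the default dspacestatistics database and credentials from config.py below (this snippet is illustrative, not part of the repository):

    # Sketch: PostgreSQL UPSERT (INSERT ... ON CONFLICT), available since 9.5.
    # Assumes the default credentials from config.py; adjust as needed.
    import psycopg2

    db = psycopg2.connect('dbname=dspacestatistics user=dspacestatistics '
                          'password=dspacestatistics host=localhost')
    cursor = db.cursor()
    cursor.execute('''CREATE TABLE IF NOT EXISTS items
                      (id INT PRIMARY KEY, views INT DEFAULT 0, downloads INT DEFAULT 0)''')

    # The first execute inserts the row; the second hits the duplicate primary
    # key and updates it in place instead of raising an IntegrityError.
    for views in (10, 25):
        cursor.execute('''INSERT INTO items(id, views) VALUES(%s, %s)
                          ON CONFLICT(id) DO UPDATE SET views=excluded.views''',
                       (1, views))

    db.commit()
    cursor.close()
    db.close()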
9 changed files with 53 additions and 34 deletions

.gitignore

@@ -1,3 +1,2 @@
 __pycache__
 venv
-*.db

CHANGELOG.md

@@ -4,6 +4,15 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+## [0.2.1] - 2018-09-24
+### Changed
+- Environment settings in example systemd unit files
+- Use psycopg2.extras.DictCursor for PostgreSQL connection
+
+## [0.2.0] - 2018-09-24
+### Changed
+- Use PostgreSQL instead of SQLite because UPSERT support needs a very new libsqlite3 whereas it's already in PostgreSQL 9.5+
+
 ## [0.1.0] - 2018-09-24
 ### Changed
 - Rename project to "DSpace Statistics API"

README.md

@@ -1,14 +1,14 @@
 # DSpace Statistics API
 A quick and dirty REST API to expose Solr view and download statistics for items in a DSpace repository.
-Written and tested in Python 3.6. SolrClient (0.2.1) does not currently run in Python 3.7.0.
+Written and tested in Python 3.6. SolrClient (0.2.1) does not currently run in Python 3.7.0. Requires PostgreSQL version 9.5 or greater for [`UPSERT` support](https://wiki.postgresql.org/wiki/UPSERT).
 
 ## Installation
 Create a virtual environment and run it:
 
     $ virtualenv -p /usr/bin/python3.6 venv
     $ . venv/bin/activate
-    $ pip install falcon gunicorn SolrClient
+    $ pip install falcon gunicorn SolrClient psycopg2-binary
     $ gunicorn app:api
 
 ## Todo

app.py

@@ -1,12 +1,9 @@
-# Tested with Python 3.6
-# See DSpace Solr docs for tips about parameters
-# https://wiki.duraspace.org/display/DSPACE/Solr
-from database import database_connection_ro
+from database import database_connection
 import falcon
 from solr import solr_connection
 
-db = database_connection_ro()
+db = database_connection()
+db.set_session(readonly=True)
 solr = solr_connection()
 
 class AllItemsResource:
@@ -24,7 +21,7 @@ class AllItemsResource:
         pages = round(cursor.fetchone()[0] / limit)
 
         # get statistics, ordered by id, and use limit and offset to page through results
-        cursor.execute('SELECT id, views, downloads FROM items ORDER BY id ASC LIMIT {0} OFFSET {1}'.format(limit, offset))
+        cursor.execute('SELECT id, views, downloads FROM items ORDER BY id ASC LIMIT {} OFFSET {}'.format(limit, offset))
 
         results = cursor.fetchmany(limit)
         cursor.close()
@@ -49,7 +46,7 @@ class ItemResource:
         """Handles GET requests"""
 
         cursor = db.cursor()
-        cursor.execute('SELECT views, downloads FROM items WHERE id={0}'.format(item_id))
+        cursor.execute('SELECT views, downloads FROM items WHERE id={}'.format(item_id))
         results = cursor.fetchone()
         cursor.close()
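
Note that both queries above interpolate limit, offset, and item_id into the SQL text with str.format(). A safer variant, sketched here rather than taken from app.py, lets psycopg2 bind the values itself:

    # Sketch (not the code in app.py): psycopg2 placeholders instead of
    # str.format(), so values are escaped by the driver, not pasted into SQL.
    cursor = db.cursor()
    cursor.execute('SELECT id, views, downloads FROM items ORDER BY id ASC LIMIT %s OFFSET %s',
                   (limit, offset))
    cursor.execute('SELECT views, downloads FROM items WHERE id=%s', (item_id,))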

config.py

@@ -3,6 +3,9 @@ import os
 
 # Check if Solr connection information was provided in the environment
 SOLR_SERVER = os.environ.get('SOLR_SERVER', 'http://localhost:8080/solr')
-SQLITE_DB = os.environ.get('SQLITE_DB', 'statistics.db')
+DATABASE_NAME = os.environ.get('DATABASE_NAME', 'dspacestatistics')
+DATABASE_USER = os.environ.get('DATABASE_USER', 'dspacestatistics')
+DATABASE_PASS = os.environ.get('DATABASE_PASS', 'dspacestatistics')
+DATABASE_HOST = os.environ.get('DATABASE_HOST', 'localhost')
 
 # vim: set sw=4 ts=4 expandtab:

contrib systemd unit (API)

@@ -3,7 +3,10 @@ Description=DSpace Statistics API
 After=network.target
 
 [Service]
 Environment=SOLR_SERVER=http://localhost:8081/solr
+Environment=DATABASE_NAME=dspacestatistics
+Environment=DATABASE_USER=dspacestatistics
+Environment=DATABASE_PASS=dspacestatistics
+Environment=DATABASE_HOST=localhost
 User=nobody
 Group=nogroup
 WorkingDirectory=/opt/ilri/dspace-statistics-api

contrib systemd unit (indexer)

@@ -4,6 +4,10 @@ After=tomcat7.target
 
 [Service]
 Environment=SOLR_SERVER=http://localhost:8081/solr
+Environment=DATABASE_NAME=dspacestatistics
+Environment=DATABASE_USER=dspacestatistics
+Environment=DATABASE_PASS=dspacestatistics
+Environment=DATABASE_HOST=localhost
 User=nobody
 Group=nogroup
 WorkingDirectory=/opt/ilri/dspace-statistics-api

database.py

@@ -1,17 +1,12 @@
-from config import SQLITE_DB
-import sqlite3
+from config import DATABASE_NAME
+from config import DATABASE_USER
+from config import DATABASE_PASS
+from config import DATABASE_HOST
+import psycopg2
+import psycopg2.extras
 
-def database_connection_rw():
-    connection = sqlite3.connect(SQLITE_DB)
-    # allow iterating over row results by column key
-    connection.row_factory = sqlite3.Row
-    return connection
-
-def database_connection_ro():
-    connection = sqlite3.connect('file:{0}?mode=ro'.format(SQLITE_DB), uri=True)
-    # allow iterating over row results by column key
-    connection.row_factory = sqlite3.Row
+def database_connection():
+    connection = psycopg2.connect("dbname={} user={} password={} host='{}'".format(DATABASE_NAME, DATABASE_USER, DATABASE_PASS, DATABASE_HOST), cursor_factory=psycopg2.extras.DictCursor)
 
     return connection
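
With cursor_factory=psycopg2.extras.DictCursor, rows returned over this connection can be indexed by column name as well as by position, which is what commit 6e4bc630f7 relies on. A small sketch of the difference, assuming database_connection() above and the items table from the indexer:

    # Sketch: DictCursor rows support access by column name and by position.
    from database import database_connection

    db = database_connection()
    cursor = db.cursor()
    cursor.execute('SELECT id, views, downloads FROM items ORDER BY id ASC LIMIT %s', (10,))
    for row in cursor.fetchall():
        print(row['id'], row['views'], row['downloads'])  # by column name
        print(row[0], row[1], row[2])                      # still works by index
    cursor.close()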

indexer.py

@@ -20,7 +20,7 @@
 # ---
 #
 # Connects to a DSpace Solr statistics core and ingests item views and downloads
-# into a SQLite database for use with other applications (an API, for example).
+# into a Postgres database for use with other applications (an API, for example).
 #
 # This script is written for Python 3 and requires several modules that you can
 # install with pip (I recommend setting up a Python virtual environment first):
@@ -32,7 +32,7 @@
 #
 # Tested with Python 3.5 and 3.6.
 
-from database import database_connection_rw
+from database import database_connection
 from solr import solr_connection
 
 def index_views():
@@ -52,8 +52,10 @@ def index_views():
     results_num_pages = round(results_numFound / results_per_page)
     results_current_page = 0
 
+    cursor = db.cursor()
+
     while results_current_page <= results_num_pages:
-        print('Page {0} of {1}.'.format(results_current_page, results_num_pages))
+        print('Page {} of {}.'.format(results_current_page, results_num_pages))
 
         res = solr.query('statistics', {
             'q':'type:2',
@@ -70,7 +72,7 @@ def index_views():
         views = res.get_facets()
 
         # in this case iterate over the 'id' dict and get the item ids and views
         for item_id, item_views in views['id'].items():
-            db.execute('''INSERT INTO items(id, views) VALUES(?, ?)
+            cursor.execute('''INSERT INTO items(id, views) VALUES(%s, %s)
                        ON CONFLICT(id) DO UPDATE SET views=excluded.views''',
                        (item_id, item_views))
@@ -78,6 +80,8 @@ def index_views():
         results_current_page += 1
 
+    cursor.close()
+
 def index_downloads():
     print("Populating database with item downloads.")
@@ -95,8 +99,10 @@ def index_downloads():
     results_num_pages = round(results_numFound / results_per_page)
     results_current_page = 0
 
+    cursor = db.cursor()
+
     while results_current_page <= results_num_pages:
-        print('Page {0} of {1}.'.format(results_current_page, results_num_pages))
+        print('Page {} of {}.'.format(results_current_page, results_num_pages))
 
         res = solr.query('statistics', {
             'q':'type:0',
@@ -113,7 +119,7 @@ def index_downloads():
         downloads = res.get_facets()
 
         # in this case iterate over the 'owningItem' dict and get the item ids and downloads
        for item_id, item_downloads in downloads['owningItem'].items():
-            db.execute('''INSERT INTO items(id, downloads) VALUES(?, ?)
+            cursor.execute('''INSERT INTO items(id, downloads) VALUES(%s, %s)
                        ON CONFLICT(id) DO UPDATE SET downloads=excluded.downloads''',
                        (item_id, item_downloads))
@@ -121,11 +127,14 @@ def index_downloads():
         results_current_page += 1
 
-db = database_connection_rw()
+    cursor.close()
+
+db = database_connection()
 solr = solr_connection()
 
 # create table to store item views and downloads
-db.execute('''CREATE TABLE IF NOT EXISTS items
+cursor = db.cursor()
+cursor.execute('''CREATE TABLE IF NOT EXISTS items
               (id INT PRIMARY KEY, views INT DEFAULT 0, downloads INT DEFAULT 0)''')
 
 index_views()
 index_downloads()
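
One caveat when reading the new indexer: psycopg2 connections are not in autocommit mode by default, so the UPSERTs above sit in an open transaction until the connection commits. A hedged sketch of how a final commit would fit at the end of the script (the hunks shown here do not include one):

    # Sketch: psycopg2 does not autocommit by default, so an explicit commit
    # is needed for the indexed counts to persist after the script exits.
    index_views()
    index_downloads()

    db.commit()  # flush the UPSERTs performed by both indexers
    db.close()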