Mirror of https://github.com/ilri/dspace-statistics-api.git

Compare commits


3 Commits

Author SHA1 Message Date
3327884f21 Update docs to remove SQLite stuff
I've decided to use PostgreSQL instead of SQLite because the UPSERT
support is available in versions of PostgreSQL we're already running,
whereas SQLite needs a VERY new version (3.24.0) that is not available
on any recent long-term support Ubuntu releases.
2018-09-25 00:56:01 +03:00
8f7450f67a Use PostgreSQL instead of SQLite
I was very surprised how easy and fast and robust SQLite was, but in
the end I realized that its UPSERT support only came in version 3.24
and both Ubuntu 16.04 and 18.04 have older versions than that! I did
manage to install libsqlite3-0 from Ubuntu 18.10 cosmic on my xenial
host, but that feels dirty.

PostgreSQL has support for UPSERT since 9.5, not to mention the same
nice LIMIT and OFFSET clauses (see the sketch after the commit list).
2018-09-25 00:49:47 +03:00
28d61fb041 README.md: Add notes about Python and SQLite versions 2018-09-24 17:26:48 +03:00
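
A side note on the UPSERT feature mentioned in the commit message above: the sketch below is a minimal, hypothetical psycopg2 example of INSERT ... ON CONFLICT ... DO UPDATE plus LIMIT/OFFSET paging. It reuses the items table and the default connection settings that appear in the diffs further down; the item id and view count are made-up values, and it assumes a reachable PostgreSQL 9.5+ server.

# Sketch only (not part of these commits): PostgreSQL UPSERT and paging
# with psycopg2, against the items table created by the indexer.
import psycopg2

db = psycopg2.connect("dbname=dspacestatistics user=dspacestatistics "
                      "password=dspacestatistics host='localhost'")
cursor = db.cursor()

# Insert a row, or overwrite the views column if the item id already exists
cursor.execute('''INSERT INTO items(id, views) VALUES(%s, %s)
                  ON CONFLICT(id) DO UPDATE SET views=excluded.views''',
               (12345, 100))
db.commit()

# Page through the results 100 rows at a time with LIMIT and OFFSET
cursor.execute('SELECT id, views, downloads FROM items ORDER BY id LIMIT %s OFFSET %s',
               (100, 0))
for row in cursor:
    print(row)

cursor.close()
db.close()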
7 changed files with 35 additions and 25 deletions

.gitignore

@@ -1,3 +1,2 @@
 __pycache__
 venv
-*.db

CHANGELOG.md

@@ -4,6 +4,10 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+## [0.2.0] - 2018-09-24
+### Changed
+- Use PostgreSQL instead of SQLite because UPSERT support needs a very new libsqlite3 whereas it's already in PostgreSQL 9.5+
 ## [0.1.0] - 2018-09-24
 ### Changed
 - Rename project to "DSpace Statistics API"

README.md

@@ -1,14 +1,14 @@
 # DSpace Statistics API
 A quick and dirty REST API to expose Solr view and download statistics for items in a DSpace repository.
-Written and tested in Python 3.6. SolrClient (0.2.1) does not currently run in Python 3.7.0.
+Written and tested in Python 3.6. SolrClient (0.2.1) does not currently run in Python 3.7.0. Requires PostgreSQL version 9.5 or greater for [`UPSERT` support](https://wiki.postgresql.org/wiki/UPSERT).
 ## Installation
 Create a virtual environment and run it:
     $ virtualenv -p /usr/bin/python3.6 venv
     $ . venv/bin/activate
-    $ pip install falcon gunicorn SolrClient
+    $ pip install falcon gunicorn SolrClient psycopg2-binary
     $ gunicorn app:api
 ## Todo

app.py

@@ -2,11 +2,12 @@
 # See DSpace Solr docs for tips about parameters
 # https://wiki.duraspace.org/display/DSPACE/Solr
-from database import database_connection_ro
+from database import database_connection
 import falcon
 from solr import solr_connection
-db = database_connection_ro()
+db = database_connection()
+db.set_session(readonly=True)
 solr = solr_connection()
 class AllItemsResource:
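
For readers unfamiliar with Falcon, here is a minimal, hypothetical resource of the same general shape as the app.py hunk above (it is not the project's actual code): it opens a read-only PostgreSQL connection as the diff shows and serves one item's statistics as JSON, runnable with gunicorn app:api as in the README. The route, resource name, and response format are assumptions.

# Hypothetical sketch, not the project's app.py: a single Falcon resource
# that reads item statistics from PostgreSQL over a read-only connection.
# Assumes Falcon 1.x (falcon.API, resp.body) as used at the time.
import json

import falcon
import psycopg2

db = psycopg2.connect("dbname=dspacestatistics user=dspacestatistics "
                      "password=dspacestatistics host='localhost'")
db.set_session(readonly=True)


class ItemResource:
    def on_get(self, req, resp, item_id):
        cursor = db.cursor()
        cursor.execute('SELECT views, downloads FROM items WHERE id=%s', (item_id,))
        row = cursor.fetchone()
        cursor.close()
        if row is None:
            resp.status = falcon.HTTP_404
            return
        resp.body = json.dumps({'id': item_id, 'views': row[0], 'downloads': row[1]})


api = falcon.API()
api.add_route('/item/{item_id:int}', ItemResource())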

config.py

@@ -3,6 +3,9 @@ import os
 # Check if Solr connection information was provided in the environment
 SOLR_SERVER = os.environ.get('SOLR_SERVER', 'http://localhost:8080/solr')
-SQLITE_DB = os.environ.get('SQLITE_DB', 'statistics.db')
+DATABASE_NAME = os.environ.get('DATABASE_NAME', 'dspacestatistics')
+DATABASE_USER = os.environ.get('DATABASE_USER', 'dspacestatistics')
+DATABASE_PASS = os.environ.get('DATABASE_PASS', 'dspacestatistics')
+DATABASE_HOST = os.environ.get('DATABASE_HOST', 'localhost')
 # vim: set sw=4 ts=4 expandtab:

database.py

@@ -1,17 +1,11 @@
-from config import SQLITE_DB
-import sqlite3
+from config import DATABASE_NAME
+from config import DATABASE_USER
+from config import DATABASE_PASS
+from config import DATABASE_HOST
+import psycopg2
-def database_connection_rw():
-    connection = sqlite3.connect(SQLITE_DB)
-    # allow iterating over row results by column key
-    connection.row_factory = sqlite3.Row
-    return connection
-def database_connection_ro():
-    connection = sqlite3.connect('file:{0}?mode=ro'.format(SQLITE_DB), uri=True)
-    # allow iterating over row results by column key
-    connection.row_factory = sqlite3.Row
+def database_connection():
+    connection = psycopg2.connect("dbname={} user={} password={} host='{}'".format(DATABASE_NAME, DATABASE_USER, DATABASE_PASS, DATABASE_HOST))
     return connection
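
One behavioural difference worth noting: the removed SQLite code set connection.row_factory = sqlite3.Row so that rows could be addressed by column key, and the new psycopg2 connection does not replicate that. If that access pattern is needed, psycopg2's DictCursor offers a similar interface; a minimal sketch, not part of this commit:

# Sketch only: psycopg2.extras.DictCursor gives column-key access to rows,
# roughly equivalent to the sqlite3.Row factory the old code used.
import psycopg2
import psycopg2.extras

db = psycopg2.connect("dbname=dspacestatistics user=dspacestatistics "
                      "password=dspacestatistics host='localhost'")
cursor = db.cursor(cursor_factory=psycopg2.extras.DictCursor)
cursor.execute('SELECT id, views, downloads FROM items')
for row in cursor:
    print(row['id'], row['views'], row['downloads'])
cursor.close()
db.close()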

indexer.py

@@ -20,7 +20,7 @@
 # ---
 #
 # Connects to a DSpace Solr statistics core and ingests item views and downloads
-# into a SQLite database for use with other applications (an API, for example).
+# into a Postgres database for use with other applications (an API, for example).
 #
 # This script is written for Python 3 and requires several modules that you can
 # install with pip (I recommend setting up a Python virtual environment first):
@@ -32,7 +32,7 @@
 #
 # Tested with Python 3.5 and 3.6.
-from database import database_connection_rw
+from database import database_connection
 from solr import solr_connection
 def index_views():
@@ -52,6 +52,8 @@ def index_views():
     results_num_pages = round(results_numFound / results_per_page)
     results_current_page = 0
+    cursor = db.cursor()
     while results_current_page <= results_num_pages:
         print('Page {0} of {1}.'.format(results_current_page, results_num_pages))
@@ -70,7 +72,7 @@ def index_views():
         views = res.get_facets()
         # in this case iterate over the 'id' dict and get the item ids and views
         for item_id, item_views in views['id'].items():
-            db.execute('''INSERT INTO items(id, views) VALUES(?, ?)
+            cursor.execute('''INSERT INTO items(id, views) VALUES(%s, %s)
                           ON CONFLICT(id) DO UPDATE SET views=excluded.views''',
                           (item_id, item_views))
@@ -78,6 +80,8 @@ def index_views():
         results_current_page += 1
+    cursor.close()
 def index_downloads():
     print("Populating database with item downloads.")
@@ -95,6 +99,8 @@ def index_downloads():
     results_num_pages = round(results_numFound / results_per_page)
     results_current_page = 0
+    cursor = db.cursor()
     while results_current_page <= results_num_pages:
         print('Page {0} of {1}.'.format(results_current_page, results_num_pages))
@@ -113,7 +119,7 @@ def index_downloads():
         downloads = res.get_facets()
         # in this case iterate over the 'owningItem' dict and get the item ids and downloads
         for item_id, item_downloads in downloads['owningItem'].items():
-            db.execute('''INSERT INTO items(id, downloads) VALUES(?, ?)
+            cursor.execute('''INSERT INTO items(id, downloads) VALUES(%s, %s)
                           ON CONFLICT(id) DO UPDATE SET downloads=excluded.downloads''',
                           (item_id, item_downloads))
@@ -121,11 +127,14 @@ def index_downloads():
         results_current_page += 1
-db = database_connection_rw()
+    cursor.close()
+db = database_connection()
 solr = solr_connection()
 # create table to store item views and downloads
-db.execute('''CREATE TABLE IF NOT EXISTS items
+cursor = db.cursor()
+cursor.execute('''CREATE TABLE IF NOT EXISTS items
               (id INT PRIMARY KEY, views INT DEFAULT 0, downloads INT DEFAULT 0)''')
 index_views()
 index_downloads()
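
A final operational note, offered as an assumption rather than something visible in these hunks: psycopg2 connections are not autocommit by default, so the CREATE TABLE and the UPSERTs only persist once the connection is committed. A minimal sketch of that pattern:

# Sketch only: psycopg2 does not autocommit by default, so changes such as
# the CREATE TABLE above must be committed; whether and where the full
# indexer commits is outside the hunks shown here.
import psycopg2

db = psycopg2.connect("dbname=dspacestatistics user=dspacestatistics "
                      "password=dspacestatistics host='localhost'")
cursor = db.cursor()
cursor.execute('''CREATE TABLE IF NOT EXISTS items
                  (id INT PRIMARY KEY, views INT DEFAULT 0, downloads INT DEFAULT 0)''')
db.commit()
cursor.close()
db.close()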