Mirror of https://github.com/ilri/dspace-statistics-api.git (synced 2024-11-22 14:25:01 +01:00)
Refactor indexer
Move the get_statistics_shards() method to a utility module so it can be used by other things.
This commit is contained in:
parent 8e87f80e9a
commit 495386856b
dspace_statistics_api/indexer.py
@@ -28,59 +28,12 @@
 #
 # See: https://wiki.duraspace.org/display/DSPACE/Solr
 
-import re
-
 import psycopg2.extras
 import requests
 
 from .config import SOLR_SERVER
 from .database import DatabaseManager
+from .util import get_statistics_shards
 
 
-# Enumerate the cores in Solr to determine if statistics have been sharded into
-# yearly shards by DSpace's stats-util or not (for example: statistics-2018).
-def get_statistics_shards():
-    # Initialize an empty list for statistics core years
-    statistics_core_years = []
-
-    # URL for Solr status to check active cores
-    solr_query_params = {"action": "STATUS", "wt": "json"}
-    solr_url = SOLR_SERVER + "/admin/cores"
-    res = requests.get(solr_url, params=solr_query_params)
-
-    if res.status_code == requests.codes.ok:
-        data = res.json()
-
-        # Iterate over active cores from Solr's STATUS response (cores are in
-        # the status array of this response).
-        for core in data["status"]:
-            # Pattern to match, for example: statistics-2018
-            pattern = re.compile("^statistics-[0-9]{4}$")
-
-            if not pattern.match(core):
-                continue
-
-            # Append current core to list
-            statistics_core_years.append(core)
-
-    # Initialize a string to hold our shards (may end up being empty if the Solr
-    # core has not been processed by stats-util).
-    shards = str()
-
-    if len(statistics_core_years) > 0:
-        # Begin building a string of shards starting with the default one
-        shards = f"{SOLR_SERVER}/statistics"
-
-        for core in statistics_core_years:
-            # Create a comma-separated list of shards to pass to our Solr query
-            #
-            # See: https://wiki.apache.org/solr/DistributedSearch
-            shards += f",{SOLR_SERVER}/{core}"
-
-    # Return the string of shards, which may actually be empty. Solr doesn't
-    # seem to mind if the shards query parameter is empty and I haven't seen
-    # any negative performance impact so this should be fine.
-    return shards
-
-
 def index_views():
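With get_statistics_shards() moved out of indexer.py, any caller can now import it from the new util module and pass the result along as Solr's shards parameter. A minimal sketch of that usage, assuming the package layout above (the function name count_views_example and the exact query parameters are illustrative, not the indexer's real code):

import requests

from .config import SOLR_SERVER
from .util import get_statistics_shards


def count_views_example():
    # Comma-separated list of statistics cores (may be an empty string)
    shards = get_statistics_shards()

    # Illustrative query against the statistics core(s); real callers build
    # their own parameters.
    solr_query_params = {
        "q": "type:2",
        "fq": "isBot:false AND statistics_type:view",
        "shards": shards,
        "rows": 0,
        "wt": "json",
    }
    res = requests.get(SOLR_SERVER + "/statistics/select", params=solr_query_params)

    return res.json()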
dspace_statistics_api/util.py  (new file, +49 lines)
@@ -0,0 +1,49 @@
+# Enumerate the cores in Solr to determine if statistics have been sharded into
+# yearly shards by DSpace's stats-util or not (for example: statistics-2018).
+def get_statistics_shards():
+    from .config import SOLR_SERVER
+
+    import re
+    import requests
+
+    # Initialize an empty list for statistics core years
+    statistics_core_years = []
+
+    # URL for Solr status to check active cores
+    solr_query_params = {"action": "STATUS", "wt": "json"}
+    solr_url = SOLR_SERVER + "/admin/cores"
+    res = requests.get(solr_url, params=solr_query_params)
+
+    if res.status_code == requests.codes.ok:
+        data = res.json()
+
+        # Iterate over active cores from Solr's STATUS response (cores are in
+        # the status array of this response).
+        for core in data["status"]:
+            # Pattern to match, for example: statistics-2018
+            pattern = re.compile("^statistics-[0-9]{4}$")
+
+            if not pattern.match(core):
+                continue
+
+            # Append current core to list
+            statistics_core_years.append(core)
+
+    # Initialize a string to hold our shards (may end up being empty if the Solr
+    # core has not been processed by stats-util).
+    shards = str()
+
+    if len(statistics_core_years) > 0:
+        # Begin building a string of shards starting with the default one
+        shards = f"{SOLR_SERVER}/statistics"
+
+        for core in statistics_core_years:
+            # Create a comma-separated list of shards to pass to our Solr query
+            #
+            # See: https://wiki.apache.org/solr/DistributedSearch
+            shards += f",{SOLR_SERVER}/{core}"
+
+    # Return the string of shards, which may actually be empty. Solr doesn't
+    # seem to mind if the shards query parameter is empty and I haven't seen
+    # any negative performance impact so this should be fine.
+    return shards
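For reference, a sketch of what the helper returns, assuming a hypothetical SOLR_SERVER of http://localhost:8080/solr and yearly cores created by stats-util:

# Hypothetical setup: SOLR_SERVER = "http://localhost:8080/solr" and stats-util
# has created the cores statistics-2017 and statistics-2018.
shards = get_statistics_shards()
# shards is now:
# "http://localhost:8080/solr/statistics,http://localhost:8080/solr/statistics-2017,http://localhost:8080/solr/statistics-2018"
# With no yearly cores present, shards is an empty string, which Solr accepts
# as a shards parameter per the comment in the function above.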