mirror of https://github.com/internetarchive/warcprox.git (synced 2025-01-18 13:22:09 +01:00)

more updates qa prototyping

commit dfc34e7561
parent ef75164f8b
@@ -62,21 +62,21 @@ class DedupableMixin(object):
         else:
             return recorded_url.response_recorder.payload_size() > self.min_binary_size
 
-class LimitRevisitsPGMixin(object):
+class LimitRevisitsPGMixin():
     """
     Limit revisits recorded to one per revisit_key
     """
-    def __init__(self, options=warcprox.Options()):
+    def __init__(self):
         self.datasource = "postgresql://archiveit@db.qa-archive-it.org/archiveit"  # "postgresql://user@db_host/db_name"
         self.datatable = "crawl_revisits"  # postgres table in db_name
 
     def limit_revisits(self, recorded_url, hash_plus_url=None, revisit_key=None):
         # tracks revisits, returns True when we've seen revisit before, else False
         if not hash_plus_url:
-            hash_plus_url = b"".join(
-                (warcprox.digest_str(recorded_url.payload_digest,
-                    self.options.base32),
-                recorded_url.url)
-            ).decode()
+            digest = warcprox.digest_str(recorded_url.payload_digest,
+                self.options.base32)
+            digest = digest[5:] if digest.startswith(b'sha1:') else digest
+            hash_plus_url = b"".join([digest, recorded_url.url]).decode()
         if not revisit_key:
             # use ait-job-id if available
             if (
@@ -86,7 +86,7 @@ class LimitRevisitsPGMixin(object):
             ):
                 revisit_key = recorded_url.warcprox_meta["metadata"]["ait-job-id"]
             else:
-                revisit_key = 'all'
+                revisit_key = '__unspecified__'
 
         query = "SELECT exists(SELECT 1 FROM crawl_revisits WHERE hash_plus_url = %s LIMIT 1);"
 
@@ -102,18 +102,17 @@ class LimitRevisitsPGMixin(object):
             self.logger.warning("exception querying for %s in %s: %s", hash_plus_url, revisit_key, e)
             return False
         result = cur.fetchone()
-        if result[0]:
-            logging.info("result[0]: %s", result[0])
-
-        if result[0] and result[0] == True:
+        if result and result == (True, ):
             logging.info("skipping revisit for url %s and hash %s", recorded_url.url, hash)
             return True
         else:
-            query = "INSERT INTO crawl_revisits (crawl_id, hash_plus_url) VALUES (%s, %s);"
-            try:
-                cur.execute(query, (revisit_key, hash_plus_url))
-            except Exception as e:
-                self.logger.warning("exception inserting %s in %s: %s", hash_plus_url, revisit_key, e)
+            query = "INSERT INTO crawl_revisits (crawl_id, hash_plus_url) VALUES (%s, %s);"
+            try:
+                cur.execute(query, (revisit_key, hash_plus_url))
+                conn.commit()
+            except Exception as e:
+                self.logger.warning("exception inserting %s in %s: %s", hash_plus_url, revisit_key, e)
 
         return False
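Below is a minimal standalone sketch of the revisit-limiting flow this commit arrives at: build the hash_plus_url key from the payload digest (with the 'sha1:' prefix stripped) joined to the URL, check the crawl_revisits table for it, and insert plus commit when it has not been seen. This is not warcprox code; the psycopg2 driver, the bare connection argument, the helper name, and the example values are assumptions for illustration only.

import logging

import psycopg2  # assumed driver; the diff only shows cur.execute() / conn.commit()

def limit_revisits(conn, payload_digest, url, revisit_key='__unspecified__'):
    # payload_digest and url are bytes; returns True when this digest+url
    # pair was already recorded for revisit_key, else records it and returns False
    digest = payload_digest[5:] if payload_digest.startswith(b'sha1:') else payload_digest
    hash_plus_url = b''.join([digest, url]).decode()

    with conn.cursor() as cur:
        try:
            cur.execute(
                'SELECT exists(SELECT 1 FROM crawl_revisits WHERE hash_plus_url = %s LIMIT 1);',
                (hash_plus_url,))
        except Exception as e:
            logging.warning('exception querying for %s in %s: %s', hash_plus_url, revisit_key, e)
            return False
        if cur.fetchone() == (True,):
            logging.info('skipping revisit for url %s', url)
            return True
        try:
            cur.execute(
                'INSERT INTO crawl_revisits (crawl_id, hash_plus_url) VALUES (%s, %s);',
                (revisit_key, hash_plus_url))
            conn.commit()
        except Exception as e:
            logging.warning('exception inserting %s in %s: %s', hash_plus_url, revisit_key, e)
    return False

# hypothetical usage, with the placeholder dsn from the diff's datasource comment:
# conn = psycopg2.connect('postgresql://user@db_host/db_name')
# limit_revisits(conn, b'sha1:ABC123...', b'https://example.com/', revisit_key='12345')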