mirror of https://github.com/internetarchive/warcprox.git
Merge branch 'limit_revisits' into qa
commit 40ef6fc186
@@ -78,7 +78,6 @@ class LimitRevisitsPGMixin():
                 self.options.base32)
             digest = digest[5:] if digest.startswith(b'sha1:') else digest
             hash_plus_url = b"".join([digest, recorded_url.url]).decode()
-
         if not revisit_key:
             # use ait-job-id if available
             if (
@@ -90,7 +89,7 @@ class LimitRevisitsPGMixin():
             else:
                 revisit_key = '__unspecified__'

-        query = "SELECT exists(SELECT 1 FROM crawl_revisits WHERE hash_plus_url = %s LIMIT 1);"
+        query = "SELECT exists(SELECT 1 FROM crawl_revisits WHERE hash_plus_url = %s and crawl_id = %s LIMIT 1);"

         try:
             conn = psycopg2.connect(self.datasource)
@@ -99,14 +98,14 @@ class LimitRevisitsPGMixin():
             return False
         cur = conn.cursor()
         try:
-            cur.execute(query, (hash_plus_url,))
+            cur.execute(query, (hash_plus_url, revisit_key))
         except Exception as e:
-            self.logger.warning("exception querying for %s in %s: %s", hash_plus_url, revisit_key, e)
+            self.logger.warning("exception querying for %s in %s: %s", digest, revisit_key, e)
             return False
         result = cur.fetchone()

         if result and result == (True, ):
-            logging.info("skipping revisit for url %s and hash %s", recorded_url.url, hash)
+            logging.info("skipping revisit for url %s and hash %s", recorded_url.url, digest)
             return True

         query = "INSERT INTO crawl_revisits (crawl_id, hash_plus_url) VALUES (%s, %s);"
@@ -114,7 +113,7 @@ class LimitRevisitsPGMixin():
             cur.execute(query, (revisit_key, hash_plus_url))
             conn.commit()
         except Exception as e:
-            self.logger.warning("exception inserting %s in %s: %s", hash_plus_url, revisit_key, e)
+            self.logger.warning("exception inserting %s in %s for %s: %s", digest, revisit_key, recorded_url.url, e)

         return False
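The change scopes revisit deduplication to a single crawl: the existence check now matches on both hash_plus_url and crawl_id (the revisit_key), and the log messages report the payload digest rather than the full hash_plus_url. The following is a minimal standalone sketch of the patched check-then-record flow, assuming a reachable PostgreSQL datasource and the crawl_revisits table used by the queries above; the helper name already_seen and the bare DSN are illustrative, not warcprox API.

# Sketch (not part of the commit) of the revisit check as patched above:
# a revisit is skipped only when the same hash_plus_url has already been
# recorded under the same crawl_id (revisit_key).
import logging
import psycopg2

def already_seen(datasource, revisit_key, hash_plus_url):
    """Return True if (hash_plus_url, crawl_id) is already recorded;
    otherwise record it and return False."""
    select = ("SELECT exists(SELECT 1 FROM crawl_revisits "
              "WHERE hash_plus_url = %s and crawl_id = %s LIMIT 1);")
    insert = "INSERT INTO crawl_revisits (crawl_id, hash_plus_url) VALUES (%s, %s);"
    try:
        # placeholder DSN, e.g. "postgres://user@db_host/db_name"
        conn = psycopg2.connect(datasource)
    except Exception as e:
        logging.warning("db connection failed: %s", e)
        return False  # on failure, fall through and let the revisit be written
    try:
        cur = conn.cursor()
        cur.execute(select, (hash_plus_url, revisit_key))
        if cur.fetchone() == (True,):
            return True  # already recorded for this crawl: skip the revisit
        cur.execute(insert, (revisit_key, hash_plus_url))
        conn.commit()
    except Exception as e:
        logging.warning("exception checking/recording %s in %s: %s",
                        hash_plus_url, revisit_key, e)
    finally:
        conn.close()
    return False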