Skip to content

Commit

Permalink
Merge branch 'master' of github.com:LMFDB/lmfdb
Browse files Browse the repository at this point in the history
  • Loading branch information
SamSchiavone committed Sep 29, 2023
2 parents 40e23fc + dba2eeb commit 44ad0ea
Show file tree
Hide file tree
Showing 26 changed files with 129 additions and 94 deletions.
4 changes: 2 additions & 2 deletions lmfdb/api/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -111,13 +111,13 @@ def split_db(tablename):
if not sizes['toast_bytes']:
sizes['toast_bytes'] = 0
if sizes['nrows']:
avg_size = int(round(float(sizes['table_bytes'] + sizes['toast_bytes'] + sizes['extra_bytes']) / sizes['nrows']))
avg_size = int(round(float(sizes['table_bytes'] + sizes['toast_bytes'] + sizes['extras_bytes']) / sizes['nrows']))
else:
avg_size = 0
stats[tablename] = {
'db':dname, 'table':link, 'dbSize':dbSize[dname], 'dbObjects':dbObjects[dname],
'size': csize, 'avgObjSize':avg_size,
'indexSize':mb(sizes['index_bytes']), 'dataSize':mb(sizes['table_bytes'] + sizes['toast_bytes'] + sizes['extra_bytes']),
'indexSize':mb(sizes['index_bytes']), 'dataSize':mb(sizes['table_bytes'] + sizes['toast_bytes'] + sizes['extras_bytes']),
'countsSize':mb(sizes['counts_bytes']), 'statsSize':mb(sizes['stats_bytes']),
'nrows': sizes['nrows'], 'nstats': sizes['nstats'], 'ncounts': sizes['ncounts']}
dataSize = size - indexSize
Expand Down
2 changes: 1 addition & 1 deletion lmfdb/artin_representations/math_classes.py
Original file line number Diff line number Diff line change
Expand Up @@ -217,7 +217,7 @@ def projective_group(self):
if groupid[0]:
label = f"{groupid[0]}.{groupid[1]}"
if self._knowl_cache is None:
name = db.gps_groups_test.lookup(label, "tex_name")
name = db.gps_groups.lookup(label, "tex_name")
else:
name = self._knowl_cache.get(label, {}).get("tex_name")
if name:
Expand Down
8 changes: 7 additions & 1 deletion lmfdb/backend/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -377,9 +377,15 @@ def _table_exists(self, tablename):
- ``tablename`` -- a string, the name of the table
"""
cur = self._execute(SQL("SELECT 1 from pg_tables where tablename=%s"), [tablename], silent=True)
cur = self._execute(SQL("SELECT 1 FROM pg_tables where tablename=%s"), [tablename], silent=True)
return cur.fetchone() is not None

def _all_tablenames(self):
    """
    Return the names of all (postgres) tables in the database.

    OUTPUT: a list of table-name strings, sorted alphabetically
    (the ordering is done server-side by the query).
    """
    cur = self._execute(
        SQL("SELECT tablename FROM pg_tables ORDER BY tablename"),
        silent=True,
    )
    return [row[0] for row in cur]

def _get_locks(self):
return self._execute(SQL(
"SELECT t.relname, l.mode, l.pid, age(clock_timestamp(), a.backend_start) "
Expand Down
33 changes: 15 additions & 18 deletions lmfdb/backend/table.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
import os
import tempfile
import time
import re

from psycopg2.sql import SQL, Identifier, Placeholder, Literal

Expand Down Expand Up @@ -1913,34 +1914,30 @@ def cleanup_from_reload(self, keep_old=0):
"""
to_remove = []
to_swap = []
tablenames = [name for name in self._all_tablenames() if name.startswith(self.search_table)]
for suffix in ["", "_extras", "_stats", "_counts"]:
head = self.search_table + suffix
tablename = head + "_tmp"
if self._table_exists(tablename):
if tablename in tablenames:
to_remove.append(tablename)
backup_number = 1
tails = []
while True:
tail = "_old{0}".format(backup_number)
tablename = head + tail
if self._table_exists(tablename):
tails.append(tail)
else:
break
backup_number += 1
olds = []
for name in tablenames:
m = re.fullmatch(head + r"_old(\d+)", name)
if m:
olds.append(int(m.group(1)))
olds.sort()
if keep_old > 0:
for new_number, tail in enumerate(tails[-keep_old:], 1):
newtail = "_old{0}".format(new_number)
if newtail != tail: # we might be keeping everything
to_swap.append((head, tail, newtail))
tails = tails[:-keep_old]
to_remove.extend([head + tail for tail in tails])
for new_number, n in enumerate(olds[-keep_old:], 1):
if n != new_number:
to_swap.append((head, n, new_number))
olds = olds[:-keep_old]
to_remove.extend([head + f"_old{n}" for n in olds])
with DelayCommit(self, silence=True):
for table in to_remove:
self._execute(SQL("DROP TABLE {0}").format(Identifier(table)))
print("Dropped {0}".format(table))
for head, cur_tail, new_tail in to_swap:
self._swap([head], cur_tail, new_tail)
self._swap([head], f"_old{cur_tail}", f"_old{new_tail}")
print("Swapped {0} to {1}".format(head + cur_tail, head + new_tail))

def max_id(self, table=None):
Expand Down
2 changes: 1 addition & 1 deletion lmfdb/characters/web_character.py
Original file line number Diff line number Diff line change
Expand Up @@ -738,7 +738,7 @@ def structure_group_knowl(self):
for v in parts.values():
v.sort()
primary = sum((parts[p] for p in sorted(parts)), [])
dblabel = db.gps_groups_test.lucky({"abelian": True, "primary_abelian_invariants": primary}, "label")
dblabel = db.gps_groups.lucky({"abelian": True, "primary_abelian_invariants": primary}, "label")
if dblabel is None:
abgp_url = url_for('abstract.by_abelian_label', label=label)
return f'<a href= %s >{self.structure}</a>' % abgp_url
Expand Down
8 changes: 7 additions & 1 deletion lmfdb/classical_modular_forms/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -530,6 +530,8 @@ def by_url_space_label(level, weight, char_orbit_label):
@cmf.route("/<int:level>/<int:weight>/<int:conrey_index>/")
def by_url_space_conreylabel(level, weight, conrey_index):
    """
    Redirect a Conrey-style space URL to its canonical orbit-label URL.

    Returns a 404 when no canonical label exists (the Conrey index is
    not coprime to the level), otherwise a permanent (301) redirect.
    """
    conrey_label = f"{level}.{weight}.{conrey_index}"
    label = convert_spacelabel_from_conrey(conrey_label)
    if label is None:
        return abort(404, "Invalid space label: not relatively prime")
    return redirect(url_for_label(label), code=301)

@cmf.route("/<int:level>/<int:weight>/<char_orbit_label>/<hecke_orbit>/")
Expand Down Expand Up @@ -593,7 +595,11 @@ def jump_box(info):
jump = info.pop("jump").strip()
errmsg = None
if OLD_SPACE_LABEL_RE.match(jump):
jump = convert_spacelabel_from_conrey(jump)
newjump = convert_spacelabel_from_conrey(jump)
if newjump is None:
errmsg = "%s is not a valid space label"
else:
jump = newjump
#handle direct trace_hash search
if re.match(r'^\#\d+$', jump) and ZZ(jump[1:]) < 2**61:
label = db.mf_newforms.lucky({'trace_hash': ZZ(jump[1:].strip())}, projection="label")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,9 +4,9 @@
<a name="coefficient_data"></a>
<h2>Coefficient data</h2>

For each \(n\) we display the coefficients of the \(q\)-expansion \(a_n\), the
<p>For each \(n\) we display the coefficients of the \(q\)-expansion \(a_n\), the
{{ KNOWL('cmf.satake_parameters',title='Satake parameters') }} \(\alpha_p\),
and the Satake angles \(\theta_p = \textrm{Arg}(\alpha_p)\).
and the Satake angles \(\theta_p = \textrm{Arg}(\alpha_p)\).</p>


<script>
Expand Down
4 changes: 2 additions & 2 deletions lmfdb/classical_modular_forms/web_newform.py
Original file line number Diff line number Diff line change
Expand Up @@ -557,7 +557,7 @@ def projective_image_latex(self):
def projective_image_knowl(self):
if self.projective_image:
gp_name = "C2^2" if self.projective_image == "D2" else ( "S3" if self.projective_image == "D3" else self.projective_image )
gp_label = db.gps_groups_test.lucky({'name':gp_name}, 'label')
gp_label = db.gps_groups.lucky({'name':gp_name}, 'label')
gp_display = fr'\({self.projective_image_latex}\)'
return gp_display if gp_label is None else abstract_group_display_knowl(gp_label, gp_display)

Expand Down Expand Up @@ -604,7 +604,7 @@ def projective_field_display(self):
@property
def artin_image_display(self):
if self.artin_image:
pretty = db.gps_groups_test.lookup(self.artin_image, 'tex_name')
pretty = db.gps_groups.lookup(self.artin_image, 'tex_name')
return pretty if pretty else self.artin_image

def artin_image_knowl(self):
Expand Down
5 changes: 4 additions & 1 deletion lmfdb/classical_modular_forms/web_space.py
Original file line number Diff line number Diff line change
Expand Up @@ -148,7 +148,10 @@ def convert_spacelabel_from_conrey(spacelabel_conrey):
e.g. 23.2.22 -> 23.2.b (because 23.b is the character orbit label of the Conrey character 23.22)
"""
N, k, n = map(int, spacelabel_conrey.split('.'))
return db.mf_newspaces.lucky({'conrey_index': ConreyCharacter(N,n).min_conrey_conj, 'level': N, 'weight': k}, projection='label')
try:
return db.mf_newspaces.lucky({'conrey_index': ConreyCharacter(N,n).min_conrey_conj, 'level': N, 'weight': k}, projection='label')
except AssertionError: # N and n not relatively prime
pass


def trace_expansion_generic(space, prec_max=10):
Expand Down
16 changes: 14 additions & 2 deletions lmfdb/elliptic_curves/elliptic_curve.py
Original file line number Diff line number Diff line change
Expand Up @@ -758,7 +758,7 @@ def EC_data(label):
if match_lmfdb_label(label):
conductor, iso_class, number = split_lmfdb_label(label)
if not number: # isogeny class
return datapage(label, ["ec_classdata", "ec_padic"], bread=bread, label_col="lmfdb_iso", sorts=[[], ["p"]])
return datapage(label, ["ec_classdata", "ec_padic", "ec_curvedata"], title=f"Elliptic curve isogeny class data - {label}", bread=bread, label_cols=["lmfdb_iso", "lmfdb_iso", "lmfdb_iso"], sorts=[[], ["p"], ['conductor', 'iso_nlabel', 'lmfdb_number']])
iso_label = class_lmfdb_label(conductor, iso_class)
labels = [label] * 8
label_cols = ["lmfdb_label"] * 8
Expand Down Expand Up @@ -1012,13 +1012,25 @@ def modm_reduce():
return "\\text{Invalid input, please enter a positive integer}"

galois_level = data['adelic_level']
modm_images = data['modm_images']
modm_level_index = [image.split('.')[:2] for image in modm_images]
if modm_level_index:
relevant_m = gcd(new_mod, int(modm_level_index[-1][0]))
index = '1' # the case where level gcd is 1
for level_index in reversed(modm_level_index):
if (relevant_m % int(level_index[0]) == 0):
index = level_index[1]
break
else:
# should not happen if adelic image is computed
index = '-1'

ans = gl2_lift(galois_image, galois_level, new_mod)
if ans == []:
result = "\\text{trivial group}"
else:
result = ",".join([str(latex(dispZmat_from_list(z,2))) for z in ans])
result += '.' + str(new_mod) + '.' + str(ans) + '.' + cur_lang
result += '.' + str(new_mod) + '.' + str(ans) + '.' + cur_lang + '.' + index
return result

def gl1_gen(M):
Expand Down
3 changes: 2 additions & 1 deletion lmfdb/elliptic_curves/isog_class.py
Original file line number Diff line number Diff line change
Expand Up @@ -172,7 +172,8 @@ def perm(i): return next(c for c in self.curves if c['Cnumber']==i+1)['lmfdb_num
self.properties += [('Graph', ''),(None, self.graph_link)]

self.downloads = [('q-expansion to text', url_for(".download_EC_qexp", label=self.lmfdb_iso, limit=1000)),
('All stored data to text', url_for(".download_EC_all", label=self.lmfdb_iso))]
('All stored data to text', url_for(".download_EC_all", label=self.lmfdb_iso)),
('Underlying data', url_for(".EC_data", label=self.lmfdb_iso))]

self.bread = [('Elliptic curves', url_for("ecnf.index")),
(r'$\Q$', url_for(".rational_elliptic_curves")),
Expand Down
8 changes: 6 additions & 2 deletions lmfdb/galois_groups/transitive_group.py
Original file line number Diff line number Diff line change
Expand Up @@ -119,7 +119,7 @@ def display_short(self, emptyifnotpretty=False):
if self._data.get('pretty') is not None:
return self._data['pretty']
gp_label = self.abstract_label()
group = db.gps_groups_test.lookup(gp_label)
group = db.gps_groups.lookup(gp_label)
if group and group.get('tex_name'):
return f"${group['tex_name']}$"
if emptyifnotpretty:
Expand Down Expand Up @@ -592,9 +592,13 @@ def group_alias_table():
ans += r'</tbody></table>'
return ans

def nt2abstract(n, t, output="pair"):
    """
    Look up the abstract group identification of the transitive group nTt.

    INPUT:

    - ``n`` -- integer, the degree of the transitive group
    - ``t`` -- integer, the T-number of the transitive group
    - ``output`` -- ``"pair"`` (default) to return the two components of
      the abstract label as a list of integers; any other value returns
      the abstract group label string itself

    Raises ``NameError`` when no abstract label is stored for this group.
    """
    # Fix: the superseded signature line `def nt2abstract(n, t):` left
    # behind by the merge diff is removed; only the new signature remains.
    res = db.gps_transitive.lookup('{}T{}'.format(n, t))
    if res and 'abstract_label' in res:
        if output == "pair":
            gapid = res['abstract_label'].split('.')
            return [int(z) for z in gapid]
        # Otherwise output abstract group label
        return res['abstract_label']
    raise NameError('Abstract group id not found')

Expand Down
2 changes: 1 addition & 1 deletion lmfdb/groups/abstract/groups_test_pages.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@ def abstract_groups_of_order(self, N, imin, imax):
errors = []
res = []
n = 0
for label in self.db.gps_groups_test.search({"order": N, "counter": {"$gte": imin, "$lte": imax}}, "label"):
for label in self.db.gps_groups.search({"order": N, "counter": {"$gte": imin, "$lte": imax}}, "label"):
n += 1
load, url = self.abstract_group(label)
if load is None:
Expand Down
26 changes: 13 additions & 13 deletions lmfdb/groups/abstract/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -609,7 +609,7 @@ def index():
("perfect=yes", "perfect"),
("rational=yes", "rational"),
]
info["maxgrp"] = db.gps_groups_test.max("order")
info["maxgrp"] = db.gps_groups.max("order")

return render_template(
"abstract-index.html",
Expand Down Expand Up @@ -648,7 +648,7 @@ def dynamic_statistics():

@abstract_page.route("/random")
def random_abstract_group():
label = db.gps_groups_test.random(projection="label")
label = db.gps_groups.random(projection="label")
response = make_response(redirect(url_for(".by_label", label=label), 307))
response.headers["Cache-Control"] = "no-cache, no-store"
return response
Expand All @@ -658,7 +658,7 @@ def random_abstract_group():
def interesting():
return interesting_knowls(
"group.abstract",
db.gps_groups_test,
db.gps_groups,
url_for_label,
title="Some interesting groups",
bread=get_bread([("Interesting", " ")]),
Expand Down Expand Up @@ -711,7 +711,7 @@ def by_abelian_label(label):
# Avoid database error on a hopeless search
dblabel = None
if not [z for z in primary if z>2**31-1]:
dblabel = db.gps_groups_test.lucky(
dblabel = db.gps_groups.lucky(
{"abelian": True, "primary_abelian_invariants": primary}, "label"
)
if dblabel is None:
Expand Down Expand Up @@ -850,7 +850,7 @@ def group_jump(info):
invs = [n.strip() for n in jump.upper().replace("C", "").replace("X", "*").replace("^", "_").split("*")]
return redirect(url_for(".by_abelian_label", label=".".join(invs)))
# by name
labs = db.gps_groups_test.search({"name":jump.replace(" ", "")}, projection="label", limit=2)
labs = db.gps_groups.search({"name":jump.replace(" ", "")}, projection="label", limit=2)
if len(labs) == 1:
return redirect(url_for(".by_label", label=labs[0]))
elif len(labs) == 2:
Expand Down Expand Up @@ -905,7 +905,7 @@ def get_sub_url(label):
return url_for(".by_subgroup_label", label=label)

class Group_download(Downloader):
table = db.gps_groups_test
table = db.gps_groups
title = "Abstract groups"
columns = "label"
column_wrappers = { "label" : lambda x : [int(a) for a in x.split(".")] }
Expand All @@ -925,7 +925,7 @@ def group_postprocess(res, info, query):
label = rec.get(col)
if label is not None:
labels.add(label)
tex_cache = {rec["label"]: rec["tex_name"] for rec in db.gps_groups_test.search({"label":{"$in":list(labels)}}, ["label", "tex_name"])}
tex_cache = {rec["label"]: rec["tex_name"] for rec in db.gps_groups.search({"label":{"$in":list(labels)}}, ["label", "tex_name"])}
for rec in res:
rec["tex_cache"] = tex_cache
return res
Expand Down Expand Up @@ -972,7 +972,7 @@ def group_postprocess(res, info, query):
default=True, align="center")])

@search_wrap(
table=db.gps_groups_test,
table=db.gps_groups,
title="Abstract group search results",
err_title="Abstract groups search input error",
columns=group_columns,
Expand Down Expand Up @@ -1090,7 +1090,7 @@ def group_parse(info, query):
tr_class=["bottom-align", ""])

@search_wrap(
table=db.gps_subgroups_test,
table=db.gps_subgroups,
title="Subgroup search results",
err_title="Subgroup search input error",
columns=subgroup_columns,
Expand Down Expand Up @@ -1506,15 +1506,15 @@ def gp_data(label):
return abort(404, f"Invalid label {label}")
bread = get_bread([(label, url_for_label(label)), ("Data", " ")])
title = f"Abstract group data - {label}"
return datapage(label, ["gps_groups_test", "gps_groups_cc_test", "gps_qchar_test", "gps_char_test", "gps_subgroups_test"], bread=bread, title=title, label_cols=["label", "group", "group", "group", "ambient"])
return datapage(label, ["gps_groups", "gps_groups_cc", "gps_qchar", "gps_char", "gps_subgroups"], bread=bread, title=title, label_cols=["label", "group", "group", "group", "ambient"])

@abstract_page.route("/sdata/<label>")
def sgp_data(label):
if not abstract_subgroup_label_regex.fullmatch(label):
return abort(404, f"Invalid label {label}")
bread = get_bread([(label, url_for_subgroup_label(label)), ("Data", " ")])
title = f"Abstract subgroup data - {label}"
data = db.gps_subgroups_test.lookup(label, ["ambient", "subgroup", "quotient"])
data = db.gps_subgroups.lookup(label, ["ambient", "subgroup", "quotient"])
if data is None:
return abort(404)
if data["quotient"] is None:
Expand All @@ -1530,7 +1530,7 @@ def download_group(**args):
com1 = "" # multiline comment start
com2 = "" # multiline comment end

#gp_data = db.gps_groups_test.lucky({"label": label})
#gp_data = db.gps_groups.lucky({"label": label})
wag = WebAbstractGroup(label)
gp_data = wag._data

Expand Down Expand Up @@ -2185,7 +2185,7 @@ def abstract_group_namecache(labels, cache=None, reverse=None):
# and serve as keys for the cache dictionary.
if cache is None:
cache = {}
for rec in db.gps_groups_test.search({"label": {"$in": labels}}, ["label", "order", "tex_name"]):
for rec in db.gps_groups.search({"label": {"$in": labels}}, ["label", "order", "tex_name"]):
label = rec["label"]
cache[label] = rec
if reverse is not None:
Expand Down
Loading

0 comments on commit 44ad0ea

Please sign in to comment.