X Close
Skip to content

Commit d50e812

Browse files
ci: merge main to release
2 parents 17b32e9 + 666100a commit d50e812

33 files changed

+1008
-151
lines changed

dev/build/Dockerfile

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
FROM ghcr.io/ietf-tools/datatracker-app-base:20260211T1901
1+
FROM ghcr.io/ietf-tools/datatracker-app-base:20260304T1633
22
LABEL maintainer="IETF Tools Team <tools-discuss@ietf.org>"
33

44
ENV DEBIAN_FRONTEND=noninteractive

dev/build/TARGET_BASE

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
20260211T1901
1+
20260304T1633

docker-compose.yml

Lines changed: 8 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,9 +13,10 @@ services:
1313
# network_mode: service:db
1414

1515
depends_on:
16+
- blobdb
17+
- blobstore
1618
- db
1719
- mq
18-
- blobstore
1920

2021
ipc: host
2122

@@ -79,7 +80,10 @@ services:
7980
command:
8081
- '--loglevel=INFO'
8182
depends_on:
83+
- blobdb
84+
- blobstore
8285
- db
86+
- mq
8387
restart: unless-stopped
8488
stop_grace_period: 1m
8589
volumes:
@@ -102,7 +106,10 @@ services:
102106
- '--concurrency=1'
103107

104108
depends_on:
109+
- blobdb
110+
- blobstore
105111
- db
112+
- mq
106113
restart: unless-stopped
107114
stop_grace_period: 1m
108115
volumes:

docker/configs/settings_local.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -105,3 +105,7 @@
105105
"ietf.api.red_api" : ["devtoken", "redtoken"], # Not a real secret
106106
"ietf.api.views_rpc" : ["devtoken"], # Not a real secret
107107
}
108+
109+
# Errata system api configuration
110+
ERRATA_METADATA_NOTIFICATION_URL = "http://host.docker.internal:8808/api/rfc_metadata_update/"
111+
ERRATA_METADATA_NOTIFICATION_API_KEY = "not a real secret"

ietf/api/serializers_rpc.py

Lines changed: 199 additions & 31 deletions
Original file line numberDiff line numberDiff line change
@@ -27,6 +27,7 @@
2727
update_rfcauthors,
2828
)
2929
from ietf.group.models import Group
30+
from ietf.group.serializers import AreaSerializer
3031
from ietf.name.models import StreamName, StdLevelName
3132
from ietf.person.models import Person
3233
from ietf.utils import log
@@ -115,6 +116,7 @@ class FullDraftSerializer(serializers.ModelSerializer):
115116
name = serializers.CharField(max_length=255)
116117
title = serializers.CharField(max_length=255)
117118
group = serializers.SlugRelatedField(slug_field="acronym", read_only=True)
119+
area = AreaSerializer(read_only=True)
118120

119121
# Other fields we need to add / adjust
120122
source_format = serializers.SerializerMethodField()
@@ -133,6 +135,7 @@ class Meta:
133135
"stream",
134136
"title",
135137
"group",
138+
"area",
136139
"abstract",
137140
"pages",
138141
"source_format",
@@ -216,32 +219,24 @@ class Meta:
216219
read_only_fields = ["id", "name"]
217220

218221

219-
class EditableRfcSerializer(serializers.ModelSerializer):
220-
# Would be nice to reconcile this with ietf.doc.serializers.RfcSerializer.
221-
# The purposes of that serializer (representing data for Red) and this one
222-
# (accepting updates from Purple) are different enough that separate formats
223-
# may be needed, but if not it'd be nice to have a single RfcSerializer that
224-
# can serve both.
225-
#
226-
# For now, only handles authors
227-
authors = RfcAuthorSerializer(many=True, min_length=1, source="rfcauthor_set")
222+
def _update_authors(rfc, authors_data):
    """Replace *rfc*'s author set from validated author data.

    Builds unsaved RfcAuthor instances from each validated-data dict, swaps
    them in via update_rfcauthors inside a transaction, persists the change
    events it produced, and returns those events.
    """
    replacement_authors = [RfcAuthor(**item) for item in authors_data]
    # Atomic so a failure while saving events rolls back the author swap too.
    with transaction.atomic():
        events = update_rfcauthors(rfc, replacement_authors)
        for ev in events:
            ev.save()
    return events
228231

229-
class Meta:
230-
model = Document
231-
fields = ["id", "authors"]
232232

233-
def update(self, instance, validated_data):
234-
assert isinstance(instance, Document)
235-
authors_data = validated_data.pop("rfcauthor_set", None)
236-
if authors_data is not None:
237-
# Construct unsaved instances from validated author data
238-
new_authors = [RfcAuthor(**ad) for ad in authors_data]
239-
# Update the RFC with the new author set
240-
with transaction.atomic():
241-
change_events = update_rfcauthors(instance, new_authors)
242-
for event in change_events:
243-
event.save()
244-
return instance
233+
class SubseriesNameField(serializers.RegexField):
    """Regex-validated field for subseries document names (e.g. "bcp14")."""

    def __init__(self, **kwargs):
        # Pattern: no leading 0; digit count finite (arbitrarily capped at 5).
        pattern = r"^(bcp|std|fyi)[1-9][0-9]{0,4}$"
        super().__init__(pattern, **kwargs)
239+
245240

246241

247242
class RfcPubSerializer(serializers.ModelSerializer):
@@ -283,13 +278,7 @@ class RfcPubSerializer(serializers.ModelSerializer):
283278
slug_field="rfc_number",
284279
queryset=Document.objects.filter(type_id="rfc"),
285280
)
286-
subseries = serializers.ListField(
287-
child=serializers.RegexField(
288-
required=False,
289-
# pattern: no leading 0, finite length (arbitrarily set to 5 digits)
290-
regex=r"^(bcp|std|fyi)[1-9][0-9]{0,4}$",
291-
)
292-
)
281+
subseries = serializers.ListField(child=SubseriesNameField(required=False))
293282
# N.b., authors is _not_ a field on Document!
294283
authors = RfcAuthorSerializer(many=True)
295284

@@ -327,6 +316,9 @@ def validate(self, data):
327316
)
328317
return data
329318

319+
def update(self, instance, validated_data):
320+
raise RuntimeError("Cannot update with this serializer")
321+
330322
def create(self, validated_data):
331323
"""Publish an RFC"""
332324
published = validated_data.pop("published")
@@ -515,6 +507,182 @@ def _create_rfc(self, validated_data):
515507
return rfc
516508

517509

510+
class EditableRfcSerializer(serializers.ModelSerializer):
    """Serializer that applies metadata updates to an existing rfc Document.

    Update-only: create() is explicitly disabled. Handles plain model fields
    generically and gives special treatment to published, subseries, and
    authors. Every change is recorded as a DocEvent attributed to the
    "(System)" Person, and the Document is saved via save_with_history.
    """
    # Would be nice to reconcile this with ietf.doc.serializers.RfcSerializer.
    # The purposes of that serializer (representing data for Red) and this one
    # (accepting updates from Purple) are different enough that separate formats
    # may be needed, but if not it'd be nice to have a single RfcSerializer that
    # can serve both.
    #
    # Should also consider whether this and RfcPubSerializer should merge.
    #
    # Treats published and subseries fields as write-only. This isn't quite correct,
    # but makes it easier and we don't currently use the serialized value except for
    # debugging.
    published = serializers.DateTimeField(
        default_timezone=datetime.timezone.utc,
        write_only=True,
    )
    # Sourced from rfcauthor_set so validated data arrives under that key.
    authors = RfcAuthorSerializer(many=True, min_length=1, source="rfcauthor_set")
    subseries = serializers.ListField(
        child=SubseriesNameField(required=False),
        write_only=True,
    )

    class Meta:
        model = Document
        fields = [
            "published",
            "title",
            "authors",
            "stream",
            "abstract",
            "pages",
            "std_level",
            "subseries",
        ]

    def create(self, validated_data):
        """Disallowed: this serializer only updates existing RFCs."""
        raise RuntimeError("Cannot create with this serializer")

    def update(self, instance, validated_data):
        """Apply validated_data to *instance* (an rfc-type Document).

        Generic model fields are set directly; published, subseries, and
        authors get special handling. All work runs in one transaction and
        each change yields a DocEvent. Returns the (possibly updated) rfc.
        """
        assert isinstance(instance, Document)
        assert instance.type_id == "rfc"
        rfc = instance  # get better name

        system_person = Person.objects.get(name="(System)")

        # Remove data that needs special handling. Use a singleton object to detect
        # missing values in case we ever support a value that needs None as an option.
        omitted = object()
        published = validated_data.pop("published", omitted)
        subseries = validated_data.pop("subseries", omitted)
        authors_data = validated_data.pop("rfcauthor_set", omitted)

        # Transaction to clean up if something fails
        with transaction.atomic():
            # update the rfc Document itself
            rfc_changes = []  # human-readable change descriptions
            rfc_events = []  # DocEvents to attach via save_with_history

            # Remaining validated_data entries map 1:1 onto Document attrs.
            for attr, new_value in validated_data.items():
                old_value = getattr(rfc, attr)
                if new_value != old_value:
                    rfc_changes.append(
                        f"changed {attr} to '{new_value}' from '{old_value}'"
                    )
                    setattr(rfc, attr, new_value)
            if len(rfc_changes) > 0:
                # One consolidated event covering all generic field changes.
                rfc_change_summary = f"{', '.join(rfc_changes)}"
                rfc_events.append(
                    DocEvent.objects.create(
                        doc=rfc,
                        rev=rfc.rev,
                        by=system_person,
                        type="sync_from_rfc_editor",
                        desc=f"Changed metadata: {rfc_change_summary}",
                    )
                )
            if authors_data is not omitted:
                # instance is the same object as rfc here.
                rfc_events.extend(_update_authors(instance, authors_data))

            if published is not omitted:
                published_event = rfc.latest_event(type="published_rfc")
                if published_event is None:
                    # unexpected, but possible in theory
                    rfc_events.append(
                        DocEvent.objects.create(
                            doc=rfc,
                            rev=rfc.rev,
                            type="published_rfc",
                            time=published,
                            by=system_person,
                            desc="RFC published",
                        )
                    )
                    rfc_events.append(
                        DocEvent.objects.create(
                            doc=rfc,
                            rev=rfc.rev,
                            type="sync_from_rfc_editor",
                            by=system_person,
                            desc=(
                                f"Set publication timestamp to {published.isoformat()}"
                            ),
                        )
                    )
                else:
                    # Adjust the existing publication event's timestamp in
                    # place and record the change, but only if it differs.
                    original_pub_time = published_event.time
                    if published != original_pub_time:
                        published_event.time = published
                        published_event.save()
                        rfc_events.append(
                            DocEvent.objects.create(
                                doc=rfc,
                                rev=rfc.rev,
                                type="sync_from_rfc_editor",
                                by=system_person,
                                desc=(
                                    f"Changed publication time to "
                                    f"{published.isoformat()} from "
                                    f"{original_pub_time.isoformat()}"
                                )
                            )
                        )

            # update subseries relations
            if subseries is not omitted:
                for subseries_doc_name in subseries:
                    # First three chars of the name ("bcp"/"std"/"fyi") double
                    # as the subseries Document type slug.
                    ss_slug = subseries_doc_name[:3]
                    subseries_doc, ss_doc_created = Document.objects.get_or_create(
                        type_id=ss_slug, name=subseries_doc_name
                    )
                    if ss_doc_created:
                        subseries_doc.docevent_set.create(
                            type=f"{ss_slug}_doc_created",
                            by=system_person,
                            desc=f"Created {subseries_doc_name} via update of {rfc.name}",
                        )
                    _, ss_rel_created = subseries_doc.relateddocument_set.get_or_create(
                        relationship_id="contains", target=rfc
                    )
                    if ss_rel_created:
                        # Log the new membership on both documents.
                        subseries_doc.docevent_set.create(
                            type="sync_from_rfc_editor",
                            by=system_person,
                            desc=f"Added {rfc.name} to {subseries_doc.name}",
                        )
                        rfc_events.append(
                            rfc.docevent_set.create(
                                type="sync_from_rfc_editor",
                                by=system_person,
                                desc=f"Added {rfc.name} to {subseries_doc.name}",
                            )
                        )
                # Delete subseries relations that are no longer current
                stale_subseries_relations = rfc.relations_that("contains").exclude(
                    source__name__in=subseries
                )
                for stale_relation in stale_subseries_relations:
                    stale_subseries_doc = stale_relation.source
                    # Log the removal on both documents before deleting.
                    rfc_events.append(
                        rfc.docevent_set.create(
                            type="sync_from_rfc_editor",
                            by=system_person,
                            desc=f"Removed {rfc.name} from {stale_subseries_doc.name}",
                        )
                    )
                    stale_subseries_doc.docevent_set.create(
                        type="sync_from_rfc_editor",
                        by=system_person,
                        desc=f"Removed {rfc.name} from {stale_subseries_doc.name}",
                    )
                stale_subseries_relations.delete()
            if len(rfc_events) > 0:
                rfc.save_with_history(rfc_events)
        return rfc
684+
685+
518686
class RfcFileSerializer(serializers.Serializer):
519687
# The structure of this serializer is constrained by what openapi-generator-cli's
520688
# python generator can correctly serialize as multipart/form-data. It does not

0 commit comments

Comments
 (0)
X Close