Compare commits

..

35 Commits

Author SHA1 Message Date
Jeremy Stretch
cbe14b76c0 Release v4.3.1 2025-05-13 15:44:10 -04:00
Jeremy Stretch
3d1334a798 Fixes #19464: Fix bulk editing of inventory items from device view (#19477) 2025-05-13 10:23:02 -07:00
Jeremy Stretch
408550d3c7 Fixes #19463: Hide button dropdown for tables which do not support saved configs (#19481) 2025-05-13 10:22:15 -07:00
bctiemann
6b9b5c4184 Merge pull request #19456 from netbox-community/19444-contact-groups-changelog
Fixes #19444: Fix change logging for contact group assignments
2025-05-13 11:39:59 -04:00
Jeremy Stretch
59dce87ba0 Fixes #19465: Fix ability to clear assigned prefix scope in UI (#19479) 2025-05-13 10:21:06 -05:00
bctiemann
f6a85775d7 Merge pull request #19480 from netbox-community/19472-vdc-device-column
Fixes #19472: Fix device column rendering in virtual device contexts table
2025-05-13 11:12:55 -04:00
Jeremy Stretch
33887e7c69 Fixes #19472: Fix device column rendering in virtual device contexts table 2025-05-13 10:46:41 -04:00
github-actions
b57ceca2fd Update source translation strings 2025-05-13 05:02:09 +00:00
bctiemann
8e13f2a9ec Merge pull request #19443 from netbox-community/19440-migration-connections
Fixes #19440: Ensure data migrations use the correct database connection
2025-05-12 14:13:31 -04:00
bctiemann
6af4f5d7ee Merge pull request #19400 from netbox-community/19397-graphql-IPRangeFilter-role
Fixes #19397: Fix filtering IP ranges by role in GraphQL API
2025-05-12 14:05:13 -04:00
bctiemann
6054f8197d Merge pull request #19418 from netbox-community/19381-script
19381 fix data file script sync
2025-05-12 11:32:16 -04:00
github-actions
fc98294812 Update source translation strings 2025-05-10 05:02:10 +00:00
Martin Hauser
4b58678823 feat(dcim): Add 2.5 Gbps and 5 Gbps options to InterfaceSpeedChoices (#19445)
Extend `InterfaceSpeedChoices` to include 2.5 Gbps and 5 Gbps values.
This improves support for modern interface speeds and enhances API data
validation.
2025-05-09 14:02:30 -05:00
Jeremy Stretch
abeed474f6 Fixes #19444: Fix change logging for contact group assignments 2025-05-09 14:21:02 -04:00
Martin Hauser
d1303f49e6 Fixes #19432 - Update PostgreSQL Version in Programming Error Message (#19446) 2025-05-09 07:38:47 -07:00
Martin Hauser
127452f4d5 feat(search): Add search index for tags
Introduces a search index for the Tag model to enable global search for
Tags. Includes fields for name, slug, and description with corresponding
weight values. Display attributes are limited to the description field.

Fixes #17073
2025-05-09 08:55:05 -04:00
github-actions
2979067b65 Update source translation strings 2025-05-09 05:02:08 +00:00
Abraham Vegh
6c07aeeded Add 1000BASE-SX interface type 2025-05-08 15:45:03 -04:00
Jeremy Stretch
76aa255f07 Fixes #19440: Ensure data migrations use the correct database connection 2025-05-08 14:53:52 -04:00
dianbofa
0c04a8d301 feat(core): Add queue_name parameter to Job.enqueue() method (#19424) 2025-05-08 08:39:55 -07:00
Corubba
6665810a6d Fixes #19361: Fix wrong graphql field data-types (#19373) 2025-05-07 08:29:52 -07:00
Jason Novinger
8baf15771a Fixes #17107: Circuit to Provider Network cabling visual bug 2025-05-07 09:28:02 -04:00
github-actions
045417c45c Update source translation strings 2025-05-07 05:02:11 +00:00
Arthur
aac333a6d4 19381 fix data file script sync 2025-05-06 11:50:02 -07:00
Andrey Tikhonov
145ee11a3f Fixes #19309: N+1 problem on /interfaces, /ip-addresses and /prefixes requests (#19304)
* Fixes N+1 problem on /interfaces, /ip-addresses and /prefixes requests

* remove extra .all()

* more prefetch for IPAddressViewSet
2025-05-06 11:47:44 -05:00
github-actions
94618a9dfb Update source translation strings 2025-05-06 05:02:20 +00:00
mr1716
21e813cee2 #19404 Deduplicate IP Range API Serializer (#19405) 2025-05-05 14:31:12 -05:00
Étienne Brunel
2c014bade5 fix: Set qinq_role allow_null to True 2025-05-05 10:16:05 -04:00
mr1716
b17bfef7e5 Fixes #19370: Update documentation default values (#19374)
* Update security.md for default values

* Update plugins.md documentation default formatting

* Tweak punctuation

---------

Co-authored-by: Jeremy Stretch <jstretch@netboxlabs.com>
2025-05-05 09:33:59 -04:00
Arthur Hanson
88f7b6508c 19380 call configure on embedded tables (#19390)
* 19380 call configure on  embedded tables

* 19380 call configure on  embedded tables

* 19380 call configure on  embedded tables
2025-05-05 09:29:32 -04:00
Jeremy Stretch
bd4f1e7d2f Fixes #19397: Fix filtering IP ranges by role in GraphQL API 2025-05-05 08:41:46 -04:00
Jeremy Stretch
6e49cee718 Fixes #19376: Fix FieldDoesNotExist exception when global search results include a contact (#19389) 2025-05-02 14:24:08 -05:00
Jeremy Stretch
4868818576 Fixes #19375: Fix table configuration after applying a saved table config (#19385) 2025-05-02 10:13:28 -07:00
Jeremy Stretch
7cd5dc0c84 Closes #19383: Extend security policy to provide guidance on compliance reporting 2025-05-02 10:20:57 -04:00
github-actions
aea51df06c Update source translation strings 2025-05-02 05:02:18 +00:00
105 changed files with 50203 additions and 44708 deletions

View File

@@ -15,7 +15,7 @@ body:
attributes:
label: NetBox version
description: What version of NetBox are you currently running?
placeholder: v4.3.0
placeholder: v4.3.1
validations:
required: true
- type: dropdown

View File

@@ -27,7 +27,7 @@ body:
attributes:
label: NetBox Version
description: What version of NetBox are you currently running?
placeholder: v4.3.0
placeholder: v4.3.1
validations:
required: true
- type: dropdown

View File

@@ -14,6 +14,12 @@ Administrators are encouraged to adhere to industry best practices concerning th
* Prohibit access to your database from clients other than the NetBox application
* Keep your deployment updated to the most recent stable release
## Compliance Reporting
Please note that security compliance reports (e.g. SOC 2) are provided by NetBox Labs only to customers using NetBox Cloud or NetBox Enterprise. They are not available to users of self-hosted NetBox Community Edition.
If you would like to consider upgrading to NetBox Cloud or Enterprise, please contact `sales@netboxlabs.com`.
## Reporting a Suspected Vulnerability
If you believe you've uncovered a security vulnerability and wish to report it confidentially, you may do so by emailing `security@netboxlabs.com`. Please ensure that your report meets all the following conditions:

View File

@@ -329,6 +329,7 @@
"100base-tx",
"100base-t1",
"1000base-t",
"1000base-sx",
"1000base-lx",
"1000base-tx",
"2.5gbase-t",

View File

@@ -35,7 +35,7 @@ Note that a plugin must be listed in `PLUGINS` for its configuration to take eff
## PLUGINS_CATALOG_CONFIG
Default: Empty
Default: `{}` (Empty)
This parameter controls how individual plugins are displayed in the plugins catalog under Admin > System > Plugins. Adding a plugin to the `hidden` list will omit that plugin from the catalog. Adding a plugin to the `static` list will display the plugin, but not link to the plugin details or upgrade instructions.
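A minimal `configuration.py` sketch based on the description above (plugin names are placeholders, not taken from this diff):

```python
# configuration.py -- sketch of PLUGINS_CATALOG_CONFIG, assuming placeholder plugin names
PLUGINS_CATALOG_CONFIG = {
    # Plugins listed under "hidden" are omitted from the catalog entirely
    'hidden': ['my_internal_plugin'],
    # Plugins listed under "static" are shown, but without details or upgrade links
    'static': ['my_pinned_plugin'],
}
```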

View File

@@ -135,7 +135,7 @@ DEFAULT_PERMISSIONS = {
## EXEMPT_VIEW_PERMISSIONS
Default: Empty list
Default: `[]` (Empty list)
A list of NetBox models to exempt from the enforcement of view permissions. Models listed here will be viewable by all users, both authenticated and anonymous.
@@ -191,7 +191,7 @@ The lifetime (in seconds) of the authentication cookie issued to a NetBox user u
## LOGIN_FORM_HIDDEN
Default: False
Default: `False`
Option to hide the login form when only SSO authentication is in use.
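A minimal `configuration.py` sketch of the two parameters whose default notation is corrected above (model names are illustrative):

```python
# configuration.py -- sketch only; model names below are examples
# Models listed here are viewable by all users, authenticated or anonymous
EXEMPT_VIEW_PERMISSIONS = ['dcim.site', 'ipam.prefix']

# Hide the login form when only SSO authentication is in use
LOGIN_FORM_HIDDEN = False
```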

View File

@@ -53,6 +53,7 @@ If a new Django release is adopted or other major dependencies (Python, PostgreS
* Update the installation guide (`docs/installation/index.md`) with the new minimum versions.
* Update the upgrade guide (`docs/installation/upgrading.md`) for the current version accordingly.
* Update the minimum PostgreSQL version in the programming error template (`netbox/templates/exceptions/programming_error.html`).
### Manually Perform a New Install

View File

@@ -1,3 +1,35 @@
# NetBox v4.3
## v4.3.1 (2025-05-13)
### Enhancements
* [#17073](https://github.com/netbox-community/netbox/issues/17073) - Enable global search for tags
* [#18419](https://github.com/netbox-community/netbox/issues/18419) - Enable specifying a queue name when calling `Job.enqueue()`
* [#19416](https://github.com/netbox-community/netbox/issues/19416) - Add the 1000BASE-SX interface type
* [#19434](https://github.com/netbox-community/netbox/issues/19434) - Add pre-populated interface speed choices for 2.5 and 5 Gbps
### Bug Fixes
* [#17107](https://github.com/netbox-community/netbox/issues/17107) - Fix cosmetic issue in cable traces ending at a provider network
* [#19309](https://github.com/netbox-community/netbox/issues/19309) - Improve REST API query performance for prefixes and IP addresses
* [#19361](https://github.com/netbox-community/netbox/issues/19361) - Fix incorrect GraphQL object types
* [#19375](https://github.com/netbox-community/netbox/issues/19375) - Fix table configuration after applying a saved table config
* [#19376](https://github.com/netbox-community/netbox/issues/19376) - Fix `FieldDoesNotExist` exception when global search results include a contact
* [#19380](https://github.com/netbox-community/netbox/issues/19380) - Fix column selections for child object tables
* [#19381](https://github.com/netbox-community/netbox/issues/19381) - Fix syncing of custom scripts from a remote data source
* [#19396](https://github.com/netbox-community/netbox/issues/19396) - Enable nullifying VLAN `qinq_role` via the REST API
* [#19397](https://github.com/netbox-community/netbox/issues/19397) - Correct enum type for IPRangeFilter in GraphQL API
* [#19432](https://github.com/netbox-community/netbox/issues/19432) - Update minimum required PostgreSQL version referenced by server error page
* [#19440](https://github.com/netbox-community/netbox/issues/19440) - Ensure data migrations use the correct database connection
* [#19444](https://github.com/netbox-community/netbox/issues/19444) - Fix change logging for contact group assignments
* [#19463](https://github.com/netbox-community/netbox/issues/19463) - Hide button dropdown for tables which do not support saved configs
* [#19464](https://github.com/netbox-community/netbox/issues/19464) - Fix bulk editing of inventory items from device view
* [#19465](https://github.com/netbox-community/netbox/issues/19465) - Fix ability to clear assigned prefix scope in UI
* [#19472](https://github.com/netbox-community/netbox/issues/19472) - Fix device column rendering in virtual device contexts table
---
## v4.3.0 (2025-05-01)
### Breaking Changes

View File

@@ -197,6 +197,7 @@ class ProfileView(LoginRequiredMixin, View):
'changed_object_type'
)[:20]
changelog_table = ObjectChangeTable(changelog)
changelog_table.configure(request)
return render(request, self.template_name, {
'changelog_table': changelog_table,
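The one-line `configure(request)` additions throughout this release all follow the same pattern; a minimal sketch, with the table class and queryset standing in as placeholders from the surrounding view:

```python
# Sketch only: any table built inside a view is configured against the request
# so per-user column preferences and saved table configs are applied.
changelog_table = ObjectChangeTable(changelog_queryset, orderable=False)  # placeholder queryset
changelog_table.configure(request)  # the one-line fix applied across these views
context = {'changelog_table': changelog_table}
```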

View File

@@ -8,10 +8,11 @@ def set_null_values(apps, schema_editor):
Circuit = apps.get_model('circuits', 'Circuit')
CircuitGroupAssignment = apps.get_model('circuits', 'CircuitGroupAssignment')
CircuitTermination = apps.get_model('circuits', 'CircuitTermination')
db_alias = schema_editor.connection.alias
Circuit.objects.filter(distance_unit='').update(distance_unit=None)
CircuitGroupAssignment.objects.filter(priority='').update(priority=None)
CircuitTermination.objects.filter(cable_end='').update(cable_end=None)
Circuit.objects.using(db_alias).filter(distance_unit='').update(distance_unit=None)
CircuitGroupAssignment.objects.using(db_alias).filter(priority='').update(priority=None)
CircuitTermination.objects.using(db_alias).filter(cable_end='').update(cable_end=None)
class Migration(migrations.Migration):
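The same three-line change recurs across the data migrations in this release; a minimal, self-contained sketch of the pattern, with app, model, and field names as placeholders:

```python
# Sketch of the #19440 pattern: resolve the database alias from schema_editor
# so data migrations honor `migrate --database=<alias>`.
from django.db import migrations


def set_null_values(apps, schema_editor):
    ExampleModel = apps.get_model('example_app', 'ExampleModel')
    db_alias = schema_editor.connection.alias
    # Operate on the same connection the migration is being applied to
    ExampleModel.objects.using(db_alias).filter(some_field='').update(some_field=None)


class Migration(migrations.Migration):
    dependencies = [('example_app', '0001_initial')]
    operations = [migrations.RunPython(set_null_values, migrations.RunPython.noop)]
```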

View File

@@ -8,14 +8,15 @@ def copy_site_assignments(apps, schema_editor):
"""
ContentType = apps.get_model('contenttypes', 'ContentType')
CircuitTermination = apps.get_model('circuits', 'CircuitTermination')
ProviderNetwork = apps.get_model('circuits', 'ProviderNetwork')
Site = apps.get_model('dcim', 'Site')
db_alias = schema_editor.connection.alias
CircuitTermination.objects.filter(site__isnull=False).update(
CircuitTermination.objects.using(db_alias).filter(site__isnull=False).update(
termination_type=ContentType.objects.get_for_model(Site), termination_id=models.F('site_id')
)
ProviderNetwork = apps.get_model('circuits', 'ProviderNetwork')
CircuitTermination.objects.filter(provider_network__isnull=False).update(
CircuitTermination.objects.using(db_alias).filter(provider_network__isnull=False).update(
termination_type=ContentType.objects.get_for_model(ProviderNetwork),
termination_id=models.F('provider_network_id'),
)

View File

@@ -7,15 +7,20 @@ def populate_denormalized_fields(apps, schema_editor):
Copy site ForeignKey values to the Termination GFK.
"""
CircuitTermination = apps.get_model('circuits', 'CircuitTermination')
db_alias = schema_editor.connection.alias
terminations = CircuitTermination.objects.filter(site__isnull=False).prefetch_related('site')
terminations = CircuitTermination.objects.using(db_alias).filter(site__isnull=False).prefetch_related('site')
for termination in terminations:
termination._region_id = termination.site.region_id
termination._site_group_id = termination.site.group_id
termination._site_id = termination.site_id
# Note: Location cannot be set prior to migration
CircuitTermination.objects.bulk_update(terminations, ['_region', '_site_group', '_site'], batch_size=100)
CircuitTermination.objects.using(db_alias).bulk_update(
terminations,
['_region', '_site_group', '_site'],
batch_size=100
)
class Migration(migrations.Migration):

View File

@@ -9,8 +9,9 @@ def set_member_type(apps, schema_editor):
ContentType = apps.get_model('contenttypes', 'ContentType')
Circuit = apps.get_model('circuits', 'Circuit')
CircuitGroupAssignment = apps.get_model('circuits', 'CircuitGroupAssignment')
db_alias = schema_editor.connection.alias
CircuitGroupAssignment.objects.update(
CircuitGroupAssignment.objects.using(db_alias).update(
member_type=ContentType.objects.get_for_model(Circuit)
)

View File

@@ -88,19 +88,11 @@ class ManagedFile(SyncedDataMixin, models.Model):
def sync_data(self):
if self.data_file:
self.file_path = os.path.basename(self.data_path)
self._write_to_disk(self.full_path, overwrite=True)
def _write_to_disk(self, path, overwrite=False):
"""
Write the object's data to disk at the specified path
"""
# Check whether file already exists
storage = self.storage
if storage.exists(path) and not overwrite:
raise FileExistsError()
storage = self.storage
with storage.open(path, 'wb+') as new_file:
new_file.write(self.data)
with storage.open(self.full_path, 'wb+') as new_file:
new_file.write(self.data_file.data)
@cached_property
def storage(self):

View File

@@ -215,6 +215,7 @@ class Job(models.Model):
schedule_at=None,
interval=None,
immediate=False,
queue_name=None,
**kwargs
):
"""
@@ -238,7 +239,7 @@ class Job(models.Model):
object_id = instance.pk
else:
object_type = object_id = None
rq_queue_name = get_queue_for_model(object_type.model if object_type else None)
rq_queue_name = queue_name if queue_name else get_queue_for_model(object_type.model if object_type else None)
queue = django_rq.get_queue(rq_queue_name)
status = JobStatusChoices.STATUS_SCHEDULED if schedule_at else JobStatusChoices.STATUS_PENDING
job = Job(
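A minimal usage sketch for the new parameter; the callable and job name are placeholders, and the call form follows how `Job.enqueue()` is used elsewhere in NetBox:

```python
from core.models import Job


def rebuild_cache(*args, **kwargs):
    """Placeholder background function executed by the RQ worker."""


# queue_name overrides the queue derived from get_queue_for_model();
# omitting it preserves the previous behavior.
Job.enqueue(rebuild_cache, name='rebuild-cache', queue_name='low')
```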

View File

@@ -223,6 +223,7 @@ class ObjectChangeView(generic.ObjectView):
data=related_changes[:50],
orderable=False
)
related_changes_table.configure(request)
objectchanges = ObjectChange.objects.valid_models().restrict(request.user, 'view').filter(
changed_object_type=instance.changed_object_type,

View File

@@ -461,6 +461,7 @@ class InterfaceViewSet(PathEndpointMixin, NetBoxModelViewSet):
Interface.objects.select_related("device", "cable"),
],
),
'virtual_circuit_termination',
'l2vpn_terminations', # Referenced by InterfaceSerializer.l2vpn_termination
'ip_addresses', # Referenced by Interface.count_ipaddresses()
'fhrp_group_assignments', # Referenced by Interface.count_fhrp_groups()

View File

@@ -874,6 +874,7 @@ class InterfaceTypeChoices(ChoiceSet):
TYPE_100ME_T1 = '100base-t1'
TYPE_100ME_SFP = '100base-x-sfp'
TYPE_1GE_FIXED = '1000base-t'
TYPE_1GE_SX_FIXED = '1000base-sx'
TYPE_1GE_LX_FIXED = '1000base-lx'
TYPE_1GE_TX_FIXED = '1000base-tx'
TYPE_1GE_GBIC = '1000base-x-gbic'
@@ -1038,6 +1039,7 @@ class InterfaceTypeChoices(ChoiceSet):
(TYPE_100ME_FIXED, '100BASE-TX (10/100ME)'),
(TYPE_100ME_T1, '100BASE-T1 (10/100ME Single Pair)'),
(TYPE_1GE_FIXED, '1000BASE-T (1GE)'),
(TYPE_1GE_SX_FIXED, '1000BASE-SX (1GE)'),
(TYPE_1GE_LX_FIXED, '1000BASE-LX (1GE)'),
(TYPE_1GE_TX_FIXED, '1000BASE-TX (1GE)'),
(TYPE_2GE_FIXED, '2.5GBASE-T (2.5GE)'),
@@ -1238,6 +1240,8 @@ class InterfaceSpeedChoices(ChoiceSet):
(10000, '10 Mbps'),
(100000, '100 Mbps'),
(1000000, '1 Gbps'),
(2500000, '2.5 Gbps'),
(5000000, '5 Gbps'),
(10000000, '10 Gbps'),
(25000000, '25 Gbps'),
(40000000, '40 Gbps'),
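Interface speeds are expressed in kbps (1 Gbps = 1,000,000 above), so the new options map to 2,500,000 and 5,000,000; a minimal ORM sketch with placeholder object names:

```python
from dcim.models import Interface

# Placeholder lookup; the new pre-populated choices correspond to these kbps values
interface = Interface.objects.get(device__name='edge-sw1', name='eth0')
interface.speed = 2_500_000  # 2.5 Gbps (5 Gbps would be 5_000_000)
interface.full_clean()
interface.save()
```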

View File

@@ -1779,6 +1779,13 @@ class InventoryItemBulkEditForm(
)
nullable_fields = ('label', 'role', 'manufacturer', 'part_id', 'description')
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# Remove parent device passed as context to avoid conflicts with the actual device field
# on this form (see bug #19464)
self.initial.pop('device', None)
#
# Device component roles

View File

@@ -66,6 +66,10 @@ class ScopedForm(forms.Form):
if self.instance and scope_type_id != self.instance.scope_type_id:
self.initial['scope'] = None
else:
# Clear the initial scope value if scope_type is not set
self.initial['scope'] = None
class ScopedBulkEditForm(forms.Form):
scope_type = ContentTypeChoiceField(

View File

@@ -541,10 +541,10 @@ class LocationType(VLANGroupsMixin, ImageAttachmentsMixin, ContactsMixin, Organi
class ManufacturerType(OrganizationalObjectType, ContactsMixin):
platforms: List[Annotated["PlatformType", strawberry.lazy('dcim.graphql.types')]]
device_types: List[Annotated["DeviceType", strawberry.lazy('dcim.graphql.types')]]
device_types: List[Annotated["DeviceTypeType", strawberry.lazy('dcim.graphql.types')]]
inventory_item_templates: List[Annotated["InventoryItemTemplateType", strawberry.lazy('dcim.graphql.types')]]
inventory_items: List[Annotated["InventoryItemType", strawberry.lazy('dcim.graphql.types')]]
module_types: List[Annotated["ModuleType", strawberry.lazy('dcim.graphql.types')]]
module_types: List[Annotated["ModuleTypeType", strawberry.lazy('dcim.graphql.types')]]
@strawberry_django.type(
@@ -617,11 +617,11 @@ class ModuleTypeType(NetBoxObjectType):
frontporttemplates: List[Annotated["FrontPortTemplateType", strawberry.lazy('dcim.graphql.types')]]
consoleserverporttemplates: List[Annotated["ConsoleServerPortTemplateType", strawberry.lazy('dcim.graphql.types')]]
interfacetemplates: List[Annotated["InterfaceTemplateType", strawberry.lazy('dcim.graphql.types')]]
powerporttemplates: List[Annotated["PowerOutletTemplateType", strawberry.lazy('dcim.graphql.types')]]
powerporttemplates: List[Annotated["PowerPortTemplateType", strawberry.lazy('dcim.graphql.types')]]
poweroutlettemplates: List[Annotated["PowerOutletTemplateType", strawberry.lazy('dcim.graphql.types')]]
rearporttemplates: List[Annotated["RearPortTemplateType", strawberry.lazy('dcim.graphql.types')]]
instances: List[Annotated["InterfaceType", strawberry.lazy('dcim.graphql.types')]]
consoleporttemplates: List[Annotated["ModuleType", strawberry.lazy('dcim.graphql.types')]]
instances: List[Annotated["ModuleType", strawberry.lazy('dcim.graphql.types')]]
consoleporttemplates: List[Annotated["ConsolePortTemplateType", strawberry.lazy('dcim.graphql.types')]]
@strawberry_django.type(

View File

@@ -26,49 +26,50 @@ def set_null_values(apps, schema_editor):
RackType = apps.get_model('dcim', 'RackType')
RearPort = apps.get_model('dcim', 'RearPort')
Site = apps.get_model('dcim', 'Site')
db_alias = schema_editor.connection.alias
Cable.objects.filter(length_unit='').update(length_unit=None)
Cable.objects.filter(type='').update(type=None)
ConsolePort.objects.filter(cable_end='').update(cable_end=None)
ConsolePort.objects.filter(type='').update(type=None)
ConsolePortTemplate.objects.filter(type='').update(type=None)
ConsoleServerPort.objects.filter(cable_end='').update(cable_end=None)
ConsoleServerPort.objects.filter(type='').update(type=None)
ConsoleServerPortTemplate.objects.filter(type='').update(type=None)
Device.objects.filter(airflow='').update(airflow=None)
Device.objects.filter(face='').update(face=None)
DeviceType.objects.filter(airflow='').update(airflow=None)
DeviceType.objects.filter(subdevice_role='').update(subdevice_role=None)
DeviceType.objects.filter(weight_unit='').update(weight_unit=None)
FrontPort.objects.filter(cable_end='').update(cable_end=None)
Interface.objects.filter(cable_end='').update(cable_end=None)
Interface.objects.filter(mode='').update(mode=None)
Interface.objects.filter(poe_mode='').update(poe_mode=None)
Interface.objects.filter(poe_type='').update(poe_type=None)
Interface.objects.filter(rf_channel='').update(rf_channel=None)
Interface.objects.filter(rf_role='').update(rf_role=None)
InterfaceTemplate.objects.filter(poe_mode='').update(poe_mode=None)
InterfaceTemplate.objects.filter(poe_type='').update(poe_type=None)
InterfaceTemplate.objects.filter(rf_role='').update(rf_role=None)
ModuleType.objects.filter(airflow='').update(airflow=None)
ModuleType.objects.filter(weight_unit='').update(weight_unit=None)
PowerFeed.objects.filter(cable_end='').update(cable_end=None)
PowerOutlet.objects.filter(cable_end='').update(cable_end=None)
PowerOutlet.objects.filter(feed_leg='').update(feed_leg=None)
PowerOutlet.objects.filter(type='').update(type=None)
PowerOutletTemplate.objects.filter(feed_leg='').update(feed_leg=None)
PowerOutletTemplate.objects.filter(type='').update(type=None)
PowerPort.objects.filter(cable_end='').update(cable_end=None)
PowerPort.objects.filter(type='').update(type=None)
PowerPortTemplate.objects.filter(type='').update(type=None)
Rack.objects.filter(airflow='').update(airflow=None)
Rack.objects.filter(form_factor='').update(form_factor=None)
Rack.objects.filter(outer_unit='').update(outer_unit=None)
Rack.objects.filter(weight_unit='').update(weight_unit=None)
RackType.objects.filter(outer_unit='').update(outer_unit=None)
RackType.objects.filter(weight_unit='').update(weight_unit=None)
RearPort.objects.filter(cable_end='').update(cable_end=None)
Site.objects.filter(time_zone='').update(time_zone=None)
Cable.objects.using(db_alias).filter(length_unit='').update(length_unit=None)
Cable.objects.using(db_alias).filter(type='').update(type=None)
ConsolePort.objects.using(db_alias).filter(cable_end='').update(cable_end=None)
ConsolePort.objects.using(db_alias).filter(type='').update(type=None)
ConsolePortTemplate.objects.using(db_alias).filter(type='').update(type=None)
ConsoleServerPort.objects.using(db_alias).filter(cable_end='').update(cable_end=None)
ConsoleServerPort.objects.using(db_alias).filter(type='').update(type=None)
ConsoleServerPortTemplate.objects.using(db_alias).filter(type='').update(type=None)
Device.objects.using(db_alias).filter(airflow='').update(airflow=None)
Device.objects.using(db_alias).filter(face='').update(face=None)
DeviceType.objects.using(db_alias).filter(airflow='').update(airflow=None)
DeviceType.objects.using(db_alias).filter(subdevice_role='').update(subdevice_role=None)
DeviceType.objects.using(db_alias).filter(weight_unit='').update(weight_unit=None)
FrontPort.objects.using(db_alias).filter(cable_end='').update(cable_end=None)
Interface.objects.using(db_alias).filter(cable_end='').update(cable_end=None)
Interface.objects.using(db_alias).filter(mode='').update(mode=None)
Interface.objects.using(db_alias).filter(poe_mode='').update(poe_mode=None)
Interface.objects.using(db_alias).filter(poe_type='').update(poe_type=None)
Interface.objects.using(db_alias).filter(rf_channel='').update(rf_channel=None)
Interface.objects.using(db_alias).filter(rf_role='').update(rf_role=None)
InterfaceTemplate.objects.using(db_alias).filter(poe_mode='').update(poe_mode=None)
InterfaceTemplate.objects.using(db_alias).filter(poe_type='').update(poe_type=None)
InterfaceTemplate.objects.using(db_alias).filter(rf_role='').update(rf_role=None)
ModuleType.objects.using(db_alias).filter(airflow='').update(airflow=None)
ModuleType.objects.using(db_alias).filter(weight_unit='').update(weight_unit=None)
PowerFeed.objects.using(db_alias).filter(cable_end='').update(cable_end=None)
PowerOutlet.objects.using(db_alias).filter(cable_end='').update(cable_end=None)
PowerOutlet.objects.using(db_alias).filter(feed_leg='').update(feed_leg=None)
PowerOutlet.objects.using(db_alias).filter(type='').update(type=None)
PowerOutletTemplate.objects.using(db_alias).filter(feed_leg='').update(feed_leg=None)
PowerOutletTemplate.objects.using(db_alias).filter(type='').update(type=None)
PowerPort.objects.using(db_alias).filter(cable_end='').update(cable_end=None)
PowerPort.objects.using(db_alias).filter(type='').update(type=None)
PowerPortTemplate.objects.using(db_alias).filter(type='').update(type=None)
Rack.objects.using(db_alias).filter(airflow='').update(airflow=None)
Rack.objects.using(db_alias).filter(form_factor='').update(form_factor=None)
Rack.objects.using(db_alias).filter(outer_unit='').update(outer_unit=None)
Rack.objects.using(db_alias).filter(weight_unit='').update(weight_unit=None)
RackType.objects.using(db_alias).filter(outer_unit='').update(outer_unit=None)
RackType.objects.using(db_alias).filter(weight_unit='').update(weight_unit=None)
RearPort.objects.using(db_alias).filter(cable_end='').update(cable_end=None)
Site.objects.using(db_alias).filter(time_zone='').update(time_zone=None)
class Migration(migrations.Migration):

View File

@@ -6,19 +6,26 @@ def populate_mac_addresses(apps, schema_editor):
ContentType = apps.get_model('contenttypes', 'ContentType')
Interface = apps.get_model('dcim', 'Interface')
MACAddress = apps.get_model('dcim', 'MACAddress')
db_alias = schema_editor.connection.alias
interface_ct = ContentType.objects.get_for_model(Interface)
mac_addresses = [
MACAddress(
mac_address=interface.mac_address, assigned_object_type=interface_ct, assigned_object_id=interface.pk
mac_address=interface.mac_address,
assigned_object_type=interface_ct,
assigned_object_id=interface.pk
)
for interface in Interface.objects.filter(mac_address__isnull=False)
]
MACAddress.objects.bulk_create(mac_addresses, batch_size=100)
MACAddress.objects.using(db_alias).bulk_create(mac_addresses, batch_size=100)
# TODO: Optimize interface updates
for mac_address in mac_addresses:
Interface.objects.filter(pk=mac_address.assigned_object_id).update(primary_mac_address=mac_address)
Interface.objects.using(db_alias).filter(
pk=mac_address.assigned_object_id
).update(
primary_mac_address=mac_address
)
class Migration(migrations.Migration):

View File

@@ -11,6 +11,8 @@ def load_initial_data(apps, schema_editor):
Load initial ModuleTypeProfile objects from file.
"""
ModuleTypeProfile = apps.get_model('dcim', 'ModuleTypeProfile')
db_alias = schema_editor.connection.alias
initial_profiles = (
'cpu',
'fan',
@@ -25,7 +27,7 @@ def load_initial_data(apps, schema_editor):
with file_path.open('r') as f:
data = json.load(f)
try:
ModuleTypeProfile.objects.create(**data)
ModuleTypeProfile.objects.using(db_alias).create(**data)
except Exception as e:
print(f"Error loading data from {file_path}")
raise e

View File

@@ -329,11 +329,9 @@ class CableTraceSVG:
# Draw attachment (line)
start = (OFFSET + self.center, OFFSET + self.cursor)
height = PADDING * 2 + LINE_HEIGHT + PADDING * 2
end = (start[0], start[1] + height)
end = (start[0], start[1] + CABLE_HEIGHT)
line = Line(start=start, end=end, class_='attachment')
group.add(line)
self.cursor += PADDING * 4
return group
@@ -358,10 +356,10 @@ class CableTraceSVG:
# Else: No need to draw parent objects (parent objects are drawn in last "round" as the far-end!)
near_terminations = self.draw_terminations(near_ends, parent_object_nodes)
self.cursor += CABLE_HEIGHT
# Connector (a Cable or WirelessLink)
if links and far_ends:
self.cursor += CABLE_HEIGHT
obj_list = {end.parent_object for end in far_ends}
parent_object_nodes, far_terminations = self.draw_far_objects(obj_list, far_ends)
@@ -449,6 +447,7 @@ class CableTraceSVG:
# Attachment
attachment = self.draw_attachment()
self.connectors.append(attachment)
self.cursor += CABLE_HEIGHT
# Object
parent_object_nodes = self.draw_parent_objects(far_ends)

View File

@@ -1091,10 +1091,9 @@ class VirtualDeviceContextTable(TenancyColumnsMixin, NetBoxTable):
verbose_name=_('Name'),
linkify=True
)
device = tables.TemplateColumn(
device = tables.Column(
verbose_name=_('Device'),
order_by=('device___name',),
template_code=DEVICE_LINK,
linkify=True
)
status = columns.ChoiceFieldColumn(

View File

@@ -2793,6 +2793,7 @@ class InterfaceView(generic.ObjectView):
),
orderable=False
)
vdc_table.configure(request)
# Get bridge interfaces
bridge_interfaces = Interface.objects.restrict(request.user, 'view').filter(bridge=instance)
@@ -2801,6 +2802,7 @@ class InterfaceView(generic.ObjectView):
exclude=('device', 'parent'),
orderable=False
)
bridge_interfaces_table.configure(request)
# Get child interfaces
child_interfaces = Interface.objects.restrict(request.user, 'view').filter(parent=instance)
@@ -2809,6 +2811,7 @@ class InterfaceView(generic.ObjectView):
exclude=('device', 'parent'),
orderable=False
)
child_interfaces_table.configure(request)
# Get assigned VLANs and annotate whether each is tagged or untagged
vlans = []
@@ -2823,6 +2826,7 @@ class InterfaceView(generic.ObjectView):
data=vlans,
orderable=False
)
vlan_table.configure(request)
# Get VLAN translation rules
vlan_translation_table = None
@@ -2831,6 +2835,7 @@ class InterfaceView(generic.ObjectView):
data=instance.vlan_translation_policy.rules.all(),
orderable=False
)
vlan_translation_table.configure(request)
return {
'vdc_table': vdc_table,

View File

@@ -4,11 +4,12 @@ from django.db import migrations
def convert_reportmodule_jobs(apps, schema_editor):
ContentType = apps.get_model('contenttypes', 'ContentType')
Job = apps.get_model('core', 'Job')
db_alias = schema_editor.connection.alias
# Convert all ReportModule jobs to ScriptModule jobs
if reportmodule_ct := ContentType.objects.filter(app_label='extras', model='reportmodule').first():
scriptmodule_ct = ContentType.objects.get(app_label='extras', model='scriptmodule')
Job.objects.filter(object_type_id=reportmodule_ct.id).update(object_type_id=scriptmodule_ct.id)
if reportmodule_ct := ContentType.objects.using(db_alias).filter(app_label='extras', model='reportmodule').first():
scriptmodule_ct = ContentType.objects.using(db_alias).get(app_label='extras', model='scriptmodule')
Job.objects.using(db_alias).filter(object_type_id=reportmodule_ct.id).update(object_type_id=scriptmodule_ct.id)
class Migration(migrations.Migration):

View File

@@ -88,24 +88,33 @@ def update_scripts(apps, schema_editor):
ScriptModule = apps.get_model('extras', 'ScriptModule')
ReportModule = apps.get_model('extras', 'ReportModule')
Job = apps.get_model('core', 'Job')
db_alias = schema_editor.connection.alias
script_ct = ContentType.objects.get_for_model(Script, for_concrete_model=False)
scriptmodule_ct = ContentType.objects.get_for_model(ScriptModule, for_concrete_model=False)
reportmodule_ct = ContentType.objects.get_for_model(ReportModule, for_concrete_model=False)
for module in ScriptModule.objects.all():
for module in ScriptModule.objects.using(db_alias).all():
for script_name in get_module_scripts(module):
script = Script.objects.create(
script = Script.objects.using(db_alias).create(
name=script_name,
module=module,
)
# Update all Jobs associated with this ScriptModule & script name to point to the new Script object
Job.objects.filter(object_type_id=scriptmodule_ct.id, object_id=module.pk, name=script_name).update(
Job.objects.using(db_alias).filter(
object_type_id=scriptmodule_ct.id,
object_id=module.pk,
name=script_name
).update(
object_type_id=script_ct.id, object_id=script.pk
)
# Update all Jobs associated with this ScriptModule & script name to point to the new Script object
Job.objects.filter(object_type_id=reportmodule_ct.id, object_id=module.pk, name=script_name).update(
Job.objects.using(db_alias).filter(
object_type_id=reportmodule_ct.id,
object_id=module.pk,
name=script_name
).update(
object_type_id=script_ct.id, object_id=script.pk
)
@@ -119,16 +128,22 @@ def update_event_rules(apps, schema_editor):
Script = apps.get_model('extras', 'Script')
ScriptModule = apps.get_model('extras', 'ScriptModule')
EventRule = apps.get_model('extras', 'EventRule')
db_alias = schema_editor.connection.alias
script_ct = ContentType.objects.get_for_model(Script)
scriptmodule_ct = ContentType.objects.get_for_model(ScriptModule)
for eventrule in EventRule.objects.filter(action_object_type=scriptmodule_ct):
for eventrule in EventRule.objects.using(db_alias).filter(action_object_type=scriptmodule_ct):
name = eventrule.action_parameters.get('script_name')
obj, __ = Script.objects.get_or_create(
module_id=eventrule.action_object_id, name=name, defaults={'is_executable': False}
obj, __ = Script.objects.using(db_alias).get_or_create(
module_id=eventrule.action_object_id,
name=name,
defaults={'is_executable': False}
)
EventRule.objects.using(db_alias).filter(pk=eventrule.pk).update(
action_object_type=script_ct,
action_object_id=obj.id
)
EventRule.objects.filter(pk=eventrule.pk).update(action_object_type=script_ct, action_object_id=obj.id)
class Migration(migrations.Migration):

View File

@@ -1,12 +1,11 @@
# Generated by Django 5.0.4 on 2024-04-24 20:09
from django.db import migrations
def update_dashboard_widgets(apps, schema_editor):
Dashboard = apps.get_model('extras', 'Dashboard')
db_alias = schema_editor.connection.alias
for dashboard in Dashboard.objects.all():
for dashboard in Dashboard.objects.using(db_alias).all():
for key, widget in dashboard.config.items():
if models := widget['config'].get('models'):
models = list(map(lambda x: x.replace('users.netboxgroup', 'users.group'), models))

View File

@@ -3,7 +3,9 @@ from django.db import migrations, models
def update_link_buttons(apps, schema_editor):
CustomLink = apps.get_model('extras', 'CustomLink')
CustomLink.objects.filter(button_class='outline-dark').update(button_class='default')
db_alias = schema_editor.connection.alias
CustomLink.objects.using(db_alias).filter(button_class='outline-dark').update(button_class='default')
class Migration(migrations.Migration):

View File

@@ -3,19 +3,21 @@ from django.db import migrations
def update_content_types(apps, schema_editor):
ContentType = apps.get_model('contenttypes', 'ContentType')
db_alias = schema_editor.connection.alias
# Delete the new ContentTypes effected by the new model in the core app
ContentType.objects.filter(app_label='core', model='objectchange').delete()
ContentType.objects.using(db_alias).filter(app_label='core', model='objectchange').delete()
# Update the app labels of the original ContentTypes for extras.ObjectChange to ensure that any
# foreign key references are preserved
ContentType.objects.filter(app_label='extras', model='objectchange').update(app_label='core')
ContentType.objects.using(db_alias).filter(app_label='extras', model='objectchange').update(app_label='core')
def update_dashboard_widgets(apps, schema_editor):
Dashboard = apps.get_model('extras', 'Dashboard')
db_alias = schema_editor.connection.alias
for dashboard in Dashboard.objects.all():
for dashboard in Dashboard.objects.using(db_alias).all():
for key, widget in dashboard.config.items():
if widget['config'].get('model') == 'extras.objectchange':
widget['config']['model'] = 'core.objectchange'

View File

@@ -6,8 +6,9 @@ from core.events import *
def set_event_types(apps, schema_editor):
EventRule = apps.get_model('extras', 'EventRule')
event_rules = EventRule.objects.all()
db_alias = schema_editor.connection.alias
event_rules = EventRule.objects.using(db_alias).all()
for event_rule in event_rules:
event_rule.event_types = []
if event_rule.type_create:

View File

@@ -6,8 +6,9 @@ def set_null_values(apps, schema_editor):
Replace empty strings with null values.
"""
CustomFieldChoiceSet = apps.get_model('extras', 'CustomFieldChoiceSet')
db_alias = schema_editor.connection.alias
CustomFieldChoiceSet.objects.filter(base_choices='').update(base_choices=None)
CustomFieldChoiceSet.objects.using(db_alias).filter(base_choices='').update(base_choices=None)
class Migration(migrations.Migration):

View File

@@ -8,7 +8,9 @@ def set_kind_default(apps, schema_editor):
Set kind to "info" on any entries with no kind assigned.
"""
JournalEntry = apps.get_model('extras', 'JournalEntry')
JournalEntry.objects.filter(kind='').update(kind=JournalEntryKindChoices.KIND_INFO)
db_alias = schema_editor.connection.alias
JournalEntry.objects.using(db_alias).filter(kind='').update(kind=JournalEntryKindChoices.KIND_INFO)
class Migration(migrations.Migration):

View File

@@ -24,6 +24,17 @@ class JournalEntryIndex(SearchIndex):
display_attrs = ('kind', 'created_by')
@register_search
class TagIndex(SearchIndex):
model = models.Tag
fields = (
('name', 100),
('slug', 110),
('description', 500),
)
display_attrs = ('description',)
@register_search
class WebhookEntryIndex(SearchIndex):
model = models.Webhook
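The same registration pattern applies to other models; a minimal sketch for a hypothetical plugin model, mirroring the `TagIndex` above (the plugin model and import are assumptions):

```python
from netbox.search import SearchIndex, register_search

from .models import WidgetProfile  # hypothetical plugin model


@register_search
class WidgetProfileIndex(SearchIndex):
    model = WidgetProfile
    # Lower weights rank higher in global search results
    fields = (
        ('name', 100),
        ('slug', 110),
        ('description', 500),
    )
    # Extra attributes shown alongside each search result
    display_attrs = ('description',)
```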

View File

@@ -147,8 +147,7 @@ class IPRangeSerializer(NetBoxModelSerializer):
fields = [
'id', 'url', 'display_url', 'display', 'family', 'start_address', 'end_address', 'size', 'vrf', 'tenant',
'status', 'role', 'description', 'comments', 'tags', 'custom_fields', 'created', 'last_updated',
'mark_populated', 'mark_utilized', 'description', 'comments', 'tags', 'custom_fields', 'created',
'last_updated',
'mark_populated', 'mark_utilized',
]
brief_fields = ('id', 'url', 'display', 'family', 'start_address', 'end_address', 'description')

View File

@@ -66,7 +66,7 @@ class VLANSerializer(NetBoxModelSerializer):
tenant = TenantSerializer(nested=True, required=False, allow_null=True)
status = ChoiceField(choices=VLANStatusChoices, required=False)
role = RoleSerializer(nested=True, required=False, allow_null=True)
qinq_role = ChoiceField(choices=VLANQinQRoleChoices, required=False)
qinq_role = ChoiceField(choices=VLANQinQRoleChoices, required=False, allow_null=True)
qinq_svlan = NestedVLANSerializer(required=False, allow_null=True, default=None)
l2vpn_termination = L2VPNTerminationSerializer(nested=True, read_only=True, allow_null=True)
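With `allow_null=True`, the field can now be cleared through the REST API; a minimal sketch with placeholder host, token, and VLAN ID:

```python
import requests

resp = requests.patch(
    'https://netbox.example.com/api/ipam/vlans/123/',  # placeholder host and VLAN ID
    headers={
        'Authorization': 'Token 0123456789abcdef0123456789abcdef01234567',  # placeholder token
        'Content-Type': 'application/json',
    },
    json={'qinq_role': None},  # clearing the Q-in-Q role is now accepted
)
resp.raise_for_status()
```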

View File

@@ -1,5 +1,6 @@
from copy import deepcopy
from django.contrib.contenttypes.prefetch import GenericPrefetch
from django.core.exceptions import ObjectDoesNotExist, PermissionDenied
from django.db import transaction
from django.shortcuts import get_object_or_404
@@ -13,6 +14,7 @@ from rest_framework.response import Response
from rest_framework.routers import APIRootView
from rest_framework.views import APIView
from dcim.models import Interface
from ipam import filtersets
from ipam.models import *
from ipam.utils import get_next_available_prefix
@@ -21,6 +23,7 @@ from netbox.api.viewsets.mixins import ObjectValidationMixin
from netbox.config import get_config
from netbox.constants import ADVISORY_LOCK_KEYS
from utilities.api import get_serializer_for_model
from virtualization.models import VMInterface
from . import serializers
@@ -79,7 +82,7 @@ class RoleViewSet(NetBoxModelViewSet):
class PrefixViewSet(NetBoxModelViewSet):
queryset = Prefix.objects.all()
queryset = Prefix.objects.prefetch_related("scope")
serializer_class = serializers.PrefixSerializer
filterset_class = filtersets.PrefixFilterSet
@@ -100,7 +103,17 @@ class IPRangeViewSet(NetBoxModelViewSet):
class IPAddressViewSet(NetBoxModelViewSet):
queryset = IPAddress.objects.all()
queryset = IPAddress.objects.prefetch_related(
GenericPrefetch(
"assigned_object",
[
# serializers are taken according to IPADDRESS_ASSIGNMENT_MODELS
FHRPGroup.objects.all(),
Interface.objects.select_related("cable", "device"),
VMInterface.objects.select_related("virtual_machine"),
],
),
)
serializer_class = serializers.IPAddressSerializer
filterset_class = filtersets.IPAddressFilterSet

View File

@@ -168,9 +168,7 @@ class IPRangeFilter(ContactFilterMixin, TenancyFilterMixin, PrimaryModelFilterMi
status: Annotated['IPRangeStatusEnum', strawberry.lazy('ipam.graphql.enums')] | None = (
strawberry_django.filter_field()
)
role: Annotated['IPAddressRoleEnum', strawberry.lazy('ipam.graphql.enums')] | None = (
strawberry_django.filter_field()
)
role: Annotated['RoleFilter', strawberry.lazy('ipam.graphql.filters')] | None = strawberry_django.filter_field()
mark_utilized: FilterLookup[bool] | None = strawberry_django.filter_field()
@strawberry_django.filter_field()

View File

@@ -11,7 +11,9 @@ def set_vid_ranges(apps, schema_editor):
Convert the min_vid & max_vid fields to a range in the new vid_ranges ArrayField.
"""
VLANGroup = apps.get_model('ipam', 'VLANGroup')
for group in VLANGroup.objects.all():
db_alias = schema_editor.connection.alias
for group in VLANGroup.objects.using(db_alias).all():
group.vid_ranges = [NumericRange(group.min_vid, group.max_vid, bounds='[]')]
group._total_vlan_ids = group.max_vid - group.min_vid + 1
group.save()

View File

@@ -9,9 +9,11 @@ def copy_site_assignments(apps, schema_editor):
ContentType = apps.get_model('contenttypes', 'ContentType')
Prefix = apps.get_model('ipam', 'Prefix')
Site = apps.get_model('dcim', 'Site')
db_alias = schema_editor.connection.alias
Prefix.objects.filter(site__isnull=False).update(
scope_type=ContentType.objects.get_for_model(Site), scope_id=models.F('site_id')
Prefix.objects.using(db_alias).filter(site__isnull=False).update(
scope_type=ContentType.objects.get_for_model(Site),
scope_id=models.F('site_id')
)

View File

@@ -7,15 +7,16 @@ def populate_denormalized_fields(apps, schema_editor):
Copy site ForeignKey values to the scope GFK.
"""
Prefix = apps.get_model('ipam', 'Prefix')
db_alias = schema_editor.connection.alias
prefixes = Prefix.objects.filter(site__isnull=False).prefetch_related('site')
prefixes = Prefix.objects.using(db_alias).filter(site__isnull=False).prefetch_related('site')
for prefix in prefixes:
prefix._region_id = prefix.site.region_id
prefix._site_group_id = prefix.site.group_id
prefix._site_id = prefix.site_id
# Note: Location cannot be set prior to migration
Prefix.objects.bulk_update(prefixes, ['_region', '_site_group', '_site'], batch_size=100)
Prefix.objects.using(db_alias).bulk_update(prefixes, ['_region', '_site_group', '_site'], batch_size=100)
class Migration(migrations.Migration):

View File

@@ -7,9 +7,10 @@ def set_null_values(apps, schema_editor):
"""
FHRPGroup = apps.get_model('ipam', 'FHRPGroup')
IPAddress = apps.get_model('ipam', 'IPAddress')
db_alias = schema_editor.connection.alias
FHRPGroup.objects.filter(auth_type='').update(auth_type=None)
IPAddress.objects.filter(role='').update(role=None)
FHRPGroup.objects.using(db_alias).filter(auth_type='').update(auth_type=None)
IPAddress.objects.using(db_alias).filter(role='').update(role=None)
class Migration(migrations.Migration):

View File

@@ -2,36 +2,38 @@ from django.db import migrations
from django.db.models import F
def populate_service_parent_gfk(apps, schema_config):
def populate_service_parent_gfk(apps, schema_editor):
Service = apps.get_model('ipam', 'Service')
ContentType = apps.get_model('contenttypes', 'ContentType')
Device = apps.get_model('dcim', 'device')
VirtualMachine = apps.get_model('virtualization', 'virtualmachine')
db_alias = schema_editor.connection.alias
Service.objects.filter(device_id__isnull=False).update(
Service.objects.using(db_alias).filter(device_id__isnull=False).update(
parent_object_type=ContentType.objects.get_for_model(Device),
parent_object_id=F('device_id'),
)
Service.objects.filter(virtual_machine_id__isnull=False).update(
Service.objects.using(db_alias).filter(virtual_machine_id__isnull=False).update(
parent_object_type=ContentType.objects.get_for_model(VirtualMachine),
parent_object_id=F('virtual_machine_id'),
)
def repopulate_device_and_virtualmachine_relations(apps, schemaconfig):
def repopulate_device_and_virtualmachine_relations(apps, schema_editor):
Service = apps.get_model('ipam', 'Service')
ContentType = apps.get_model('contenttypes', 'ContentType')
Device = apps.get_model('dcim', 'device')
VirtualMachine = apps.get_model('virtualization', 'virtualmachine')
db_alias = schema_editor.connection.alias
Service.objects.filter(
Service.objects.using(db_alias).filter(
parent_object_type=ContentType.objects.get_for_model(Device),
).update(
device_id=F('parent_object_id')
)
Service.objects.filter(
Service.objects.using(db_alias).filter(
parent_object_type=ContentType.objects.get_for_model(VirtualMachine),
).update(
virtual_machine_id=F('parent_object_id')

View File

@@ -45,10 +45,13 @@ class VRFView(GetRelatedModelsMixin, generic.ObjectView):
instance.import_targets.all(),
orderable=False
)
import_targets_table.configure(request)
export_targets_table = tables.RouteTargetTable(
instance.export_targets.all(),
orderable=False
)
export_targets_table.configure(request)
return {
'related_models': self.get_related_models(request, instance, omit=[Interface, VMInterface]),
@@ -530,6 +533,7 @@ class PrefixView(generic.ObjectView):
exclude=('vrf', 'utilization'),
orderable=False
)
parent_prefix_table.configure(request)
# Duplicate prefixes table
duplicate_prefixes = Prefix.objects.restrict(request.user, 'view').filter(
@@ -544,6 +548,7 @@ class PrefixView(generic.ObjectView):
exclude=('vrf', 'utilization'),
orderable=False
)
duplicate_prefix_table.configure(request)
return {
'aggregate': aggregate,
@@ -709,6 +714,7 @@ class IPRangeView(generic.ObjectView):
exclude=('vrf', 'utilization'),
orderable=False
)
parent_prefixes_table.configure(request)
return {
'parent_prefixes_table': parent_prefixes_table,
@@ -796,6 +802,7 @@ class IPAddressView(generic.ObjectView):
exclude=('vrf', 'utilization'),
orderable=False
)
parent_prefixes_table.configure(request)
# Duplicate IPs table
duplicate_ips = IPAddress.objects.restrict(request.user, 'view').filter(
@@ -811,6 +818,7 @@ class IPAddressView(generic.ObjectView):
duplicate_ips = duplicate_ips.exclude(role=IPAddressRoleChoices.ROLE_ANYCAST)
# Limit to a maximum of 10 duplicates displayed here
duplicate_ips_table = tables.IPAddressTable(duplicate_ips[:10], orderable=False)
duplicate_ips_table.configure(request)
return {
'parent_prefixes_table': parent_prefixes_table,
@@ -888,6 +896,7 @@ class IPAddressAssignView(generic.ObjectView):
# Limit to 100 results
addresses = filtersets.IPAddressFilterSet(request.POST, addresses).qs[:100]
table = tables.IPAddressAssignTable(addresses)
table.configure(request)
return render(request, 'ipam/ipaddress_assign.html', {
'form': form,
@@ -1053,6 +1062,8 @@ class VLANTranslationPolicyView(GetRelatedModelsMixin, generic.ObjectView):
data=instance.rules.all(),
orderable=False
)
vlan_translation_table.configure(request)
return {
'vlan_translation_table': vlan_translation_table,
}
@@ -1170,6 +1181,7 @@ class FHRPGroupView(GetRelatedModelsMixin, generic.ObjectView):
data=FHRPGroupAssignment.objects.restrict(request.user, 'view').filter(group=instance),
orderable=False
)
members_table.configure(request)
members_table.columns.hide('group')
return {
@@ -1289,6 +1301,7 @@ class VLANView(generic.ObjectView):
'vrf', 'scope', 'role', 'tenant'
)
prefix_table = tables.PrefixTable(list(prefixes), exclude=('vlan', 'utilization'), orderable=False)
prefix_table.configure(request)
return {
'prefix_table': prefix_table,

Diffs for 4 files suppressed because one or more lines are too long

View File

@@ -24,13 +24,13 @@
"dependencies": {
"@mdi/font": "7.4.47",
"@tabler/core": "1.2.0",
"bootstrap": "5.3.5",
"bootstrap": "5.3.6",
"clipboard": "2.0.11",
"flatpickr": "4.6.13",
"gridstack": "12.1.1",
"gridstack": "12.1.2",
"htmx.org": "2.0.4",
"query-string": "9.1.1",
"sass": "1.87.0",
"query-string": "9.1.2",
"sass": "1.88.0",
"tom-select": "2.4.3",
"typeface-inter": "3.18.1",
"typeface-roboto-mono": "1.1.13"

View File

@@ -106,7 +106,8 @@ function handleSubmit(event: Event): void {
const toast = createToast('danger', 'Error Updating Table Configuration', res.error);
toast.show();
} else {
location.reload();
// Strip any URL query parameters & reload the page
window.location.href = window.location.origin + window.location.pathname;
}
});
}

View File

@@ -1058,6 +1058,11 @@ bootstrap@5.3.5:
resolved "https://registry.yarnpkg.com/bootstrap/-/bootstrap-5.3.5.tgz#be42cfe0d580e97ee1abb7d38ce94f5c393c9bb6"
integrity sha512-ct1CHKtiobRimyGzmsSldEtM03E8fcEX4Tb3dGXz1V8faRwM50+vfHwTzOxB3IlKO7m+9vTH3s/3C6T2EAPeTA==
bootstrap@5.3.6:
version "5.3.6"
resolved "https://registry.yarnpkg.com/bootstrap/-/bootstrap-5.3.6.tgz#fbd91ebaff093f5b191a1c01a8c866d24f9fa6e1"
integrity sha512-jX0GAcRzvdwISuvArXn3m7KZscWWFAf1MKBcnzaN02qWMb3jpMoUX4/qgeiGzqyIb4ojulRzs89UCUmGcFSzTA==
brace-expansion@^1.1.7:
version "1.1.11"
resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd"
@@ -1903,10 +1908,10 @@ graphql@16.10.0:
resolved "https://registry.yarnpkg.com/graphql/-/graphql-16.10.0.tgz#24c01ae0af6b11ea87bf55694429198aaa8e220c"
integrity sha512-AjqGKbDGUFRKIRCP9tCKiIGHyriz2oHEbPIbEtcSLSs4YjReZOIPQQWek4+6hjw62H9QShXHyaGivGiYVLeYFQ==
gridstack@12.1.1:
version "12.1.1"
resolved "https://registry.yarnpkg.com/gridstack/-/gridstack-12.1.1.tgz#623ea5b6560cc9509252db66fd7a529d70bd2d26"
integrity sha512-wpfNUkzVBuHJftRRMRQDpH8DPIO5NBdfE0ioIIVoXFePBzqqVTpfgttSs5IJYqO4Uj5LfnJ2fjOmsFEBqpeSwg==
gridstack@12.1.2:
version "12.1.2"
resolved "https://registry.yarnpkg.com/gridstack/-/gridstack-12.1.2.tgz#784f6d55873bb48fa9230c1284f769c9fbf785a8"
integrity sha512-IC1mkm5xonhAnftwIxsG+B3bawxC61ciKWEvX15ExpVQPbNVN7O9aZZhM7Y/eE4JaIR8PXrdkjd12gMnwNYRLQ==
has-bigints@^1.0.1, has-bigints@^1.0.2:
version "1.0.2"
@@ -2514,10 +2519,10 @@ punycode@^2.1.0:
resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.3.1.tgz#027422e2faec0b25e1549c3e1bd8309b9133b6e5"
integrity sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==
query-string@9.1.1:
version "9.1.1"
resolved "https://registry.yarnpkg.com/query-string/-/query-string-9.1.1.tgz#dbfebb4196aeb2919915f2b2b81b91b965cf03a0"
integrity sha512-MWkCOVIcJP9QSKU52Ngow6bsAWAPlPK2MludXvcrS2bGZSl+T1qX9MZvRIkqUIkGLJquMJHWfsT6eRqUpp4aWg==
query-string@9.1.2:
version "9.1.2"
resolved "https://registry.yarnpkg.com/query-string/-/query-string-9.1.2.tgz#1e4c6a17e2eaab7a282240cf716dec5e72c36cba"
integrity sha512-s3UlTyjxRux4KjwWaJsjh1Mp8zoCkSGKirbD9H89pEM9UOZsfpRZpdfzvsy2/mGlLfC3NnYVpy2gk7jXITHEtA==
dependencies:
decode-uri-component "^0.4.1"
filter-obj "^5.1.0"
@@ -2660,10 +2665,10 @@ safe-regex-test@^1.0.3:
es-errors "^1.3.0"
is-regex "^1.1.4"
sass@1.87.0:
version "1.87.0"
resolved "https://registry.yarnpkg.com/sass/-/sass-1.87.0.tgz#8cceb36fa63fb48a8d5d7f2f4c13b49c524b723e"
integrity sha512-d0NoFH4v6SjEK7BoX810Jsrhj7IQSYHAHLi/iSpgqKc7LaIDshFRlSg5LOymf9FqQhxEHs2W5ZQXlvy0KD45Uw==
sass@1.88.0:
version "1.88.0"
resolved "https://registry.yarnpkg.com/sass/-/sass-1.88.0.tgz#cd1495749bebd9e4aca86e93ee60b3904a107789"
integrity sha512-sF6TWQqjFvr4JILXzG4ucGOLELkESHL+I5QJhh7CNaE+Yge0SI+ehCatsXhJ7ymU1hAFcIS3/PBpjdIbXoyVbg==
dependencies:
chokidar "^4.0.0"
immutable "^5.0.2"

View File

@@ -1,3 +1,3 @@
version: "4.3.0"
version: "4.3.1"
edition: "Community"
published: "2025-05-01"
published: "2025-05-13"

View File

@@ -17,7 +17,7 @@
<i class="mdi mdi-alert"></i>
<strong>{% trans "Unsupported PostgreSQL version" %}.</strong>
{% blocktrans trimmed %}
Ensure that PostgreSQL version 12 or later is in use. You can check this by connecting to the database using
Ensure that PostgreSQL version 14 or later is in use. You can check this by connecting to the database using
NetBox's credentials and issuing a query for <code>SELECT VERSION()</code>.
{% endblocktrans %}
</p>

View File

@@ -30,20 +30,24 @@
<button type="button" data-bs-toggle="modal" title="{% trans "Configure Table" %}" data-bs-target="#{{ table_modal }}" class="btn">
<i class="mdi mdi-cog"></i> {% trans "Configure Table" %}
</button>
<button type="button" class="btn dropdown-toggle dropdown-toggle-split" data-bs-toggle="dropdown" aria-expanded="false">
<span class="visually-hidden">Toggle Dropdown</span>
</button>
<div class="dropdown-menu">
{% if table.config_params %}
<a class="dropdown-item" href="{% url 'extras:tableconfig_add' %}?{{ table.config_params }}&return_url={{ request.path }}" id="table_save_link">Save</a>
{% endif %}
{% if table_configs %}
<hr class="dropdown-divider">
{% for config in table_configs %}
<a class="dropdown-item" href="?tableconfig_id={{ config.pk }}">{{ config }}</a>
{% endfor %}
{% endif %}
</div>
{% if table.config_params or table_configs %}
<button type="button" class="btn dropdown-toggle dropdown-toggle-split" data-bs-toggle="dropdown" aria-expanded="false">
<span class="visually-hidden">{% trans "Toggle Dropdown" %}</span>
</button>
<div class="dropdown-menu">
{% if table.config_params %}
<a class="dropdown-item" href="{% url 'extras:tableconfig_add' %}?{{ table.config_params }}&return_url={{ request.path }}" id="table_save_link">Save</a>
{% endif %}
{% if table.config_params and table_configs %}
<hr class="dropdown-divider">
{% endif %}
{% if table_configs %}
{% for config in table_configs %}
<a class="dropdown-item" href="?tableconfig_id={{ config.pk }}">{{ config }}</a>
{% endfor %}
{% endif %}
</div>
{% endif %}
</div>
{% endif %}
</div>

View File

@@ -6,8 +6,9 @@ def set_null_values(apps, schema_editor):
Replace empty strings with null values.
"""
ContactAssignment = apps.get_model('tenancy', 'ContactAssignment')
db_alias = schema_editor.connection.alias
ContactAssignment.objects.filter(priority='').update(priority=None)
ContactAssignment.objects.using(db_alias).filter(priority='').update(priority=None)
class Migration(migrations.Migration):

View File

@@ -3,10 +3,10 @@ from django.db import migrations, models
def migrate_contact_groups(apps, schema_editor):
Contacts = apps.get_model('tenancy', 'Contact')
Contact = apps.get_model('tenancy', 'Contact')
db_alias = schema_editor.connection.alias
qs = Contacts.objects.filter(group__isnull=False)
for contact in qs:
for contact in Contact.objects.using(db_alias).filter(group__isnull=False):
contact.groups.add(contact.group)

View File

@@ -0,0 +1,71 @@
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('tenancy', '0019_contactgroup_comments_tenantgroup_comments'),
]
operations = [
migrations.SeparateDatabaseAndState(
state_operations=[
# Remove the "through" models from the M2M field
migrations.AlterField(
model_name='contact',
name='groups',
field=models.ManyToManyField(
blank=True,
related_name='contacts',
related_query_name='contact',
to='tenancy.contactgroup'
),
),
# Remove the ContactGroupMembership model
migrations.DeleteModel(
name='ContactGroupMembership',
),
],
database_operations=[
# Rename ContactGroupMembership table
migrations.AlterModelTable(
name='ContactGroupMembership',
table='tenancy_contact_groups',
),
# Rename the 'group' column (also renames its FK constraint)
migrations.RenameField(
model_name='contactgroupmembership',
old_name='group',
new_name='contactgroup',
),
# Rename PK sequence
migrations.RunSQL(
'ALTER TABLE tenancy_contactgroupmembership_id_seq '
'RENAME TO tenancy_contact_groups_id_seq'
),
# Rename indexes
migrations.RunSQL(
'ALTER INDEX tenancy_contactgroupmembership_pkey '
'RENAME TO tenancy_contact_groups_pkey'
),
migrations.RunSQL(
'ALTER INDEX tenancy_contactgroupmembership_contact_id_04a138a7 '
'RENAME TO tenancy_contact_groups_contact_id_84c9d84f'
),
migrations.RunSQL(
'ALTER INDEX tenancy_contactgroupmembership_group_id_bc712dd0 '
'RENAME TO tenancy_contact_groups_contactgroup_id_5c8d6c5a'
),
migrations.RunSQL(
'ALTER INDEX unique_group_name '
'RENAME TO tenancy_contact_groups_contact_id_contactgroup_id_f4434f2c_uniq'
),
# Rename foreign key constraint for contact_id
migrations.RunSQL(
'ALTER TABLE tenancy_contact_groups '
'RENAME CONSTRAINT tenancy_contactgroup_contact_id_04a138a7_fk_tenancy_c '
'TO tenancy_contact_grou_contact_id_84c9d84f_fk_tenancy_c'
),
],
),
]

View File

@@ -13,7 +13,6 @@ __all__ = (
     'ContactAssignment',
     'Contact',
     'ContactGroup',
-    'ContactGroupMembership',
     'ContactRole',
 )
@@ -51,7 +50,6 @@ class Contact(PrimaryModel):
     groups = models.ManyToManyField(
         to='tenancy.ContactGroup',
         related_name='contacts',
-        through='tenancy.ContactGroupMembership',
         related_query_name='contact',
         blank=True
     )
@@ -97,18 +95,6 @@ class Contact(PrimaryModel):
         return self.name

-class ContactGroupMembership(models.Model):
-    group = models.ForeignKey(ContactGroup, related_name="+", on_delete=models.CASCADE)
-    contact = models.ForeignKey(Contact, related_name="+", on_delete=models.CASCADE)
-
-    class Meta:
-        constraints = [
-            models.UniqueConstraint(fields=['group', 'contact'], name='unique_group_name')
-        ]
-        verbose_name = _('contact group membership')
-        verbose_name_plural = _('contact group memberships')

 class ContactAssignment(CustomFieldsMixin, ExportTemplatesMixin, TagsMixin, ChangeLoggedModel):
     object_type = models.ForeignKey(
         to='contenttypes.ContentType',
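With the explicit through model removed, groups behaves as an ordinary auto-managed ManyToManyField, so group assignments can be manipulated directly through the related manager. A hedged usage sketch (the contact and group records named here are invented for illustration):

# Sketch only; 'Alice' and 'Vendors' are made-up example records.
contact = Contact.objects.get(name='Alice')
group = ContactGroup.objects.get(name='Vendors')

contact.groups.add(group)     # writes a row to the renamed tenancy_contact_groups table
contact.groups.remove(group)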

@@ -15,7 +15,7 @@ class ContactIndex(SearchIndex):
         ('description', 500),
         ('comments', 5000),
     )
-    display_attrs = ('group', 'title', 'phone', 'email', 'description')
+    display_attrs = ('title', 'phone', 'email', 'description')

 @register_search
@register_search

Fifteen file diffs suppressed because they are too large.
@@ -3,18 +3,22 @@ from django.db import migrations
 def update_content_types(apps, schema_editor):
     ContentType = apps.get_model('contenttypes', 'ContentType')
+    CustomField = apps.get_model('extras', 'CustomField')
+    db_alias = schema_editor.connection.alias

     # Delete the new ContentTypes effected by the new models in the users app
-    ContentType.objects.filter(app_label='users', model='user').delete()
+    ContentType.objects.using(db_alias).filter(app_label='users', model='user').delete()

     # Update the app labels of the original ContentTypes for auth.User to ensure
     # that any foreign key references are preserved
-    ContentType.objects.filter(app_label='auth', model='user').update(app_label='users')
+    ContentType.objects.using(db_alias).filter(app_label='auth', model='user').update(app_label='users')

-    netboxuser_ct = ContentType.objects.filter(app_label='users', model='netboxuser').first()
+    netboxuser_ct = ContentType.objects.using(db_alias).filter(app_label='users', model='netboxuser').first()
     if netboxuser_ct:
-        user_ct = ContentType.objects.filter(app_label='users', model='user').first()
-        CustomField = apps.get_model('extras', 'CustomField')
-        CustomField.objects.filter(related_object_type_id=netboxuser_ct.id).update(related_object_type_id=user_ct.id)
+        user_ct = ContentType.objects.using(db_alias).filter(app_label='users', model='user').first()
+        CustomField.objects.using(db_alias).filter(related_object_type_id=netboxuser_ct.id).update(
+            related_object_type_id=user_ct.id
+        )
         netboxuser_ct.delete()

@@ -9,10 +9,11 @@ def update_custom_fields(apps, schema_editor):
     ContentType = apps.get_model('contenttypes', 'ContentType')
     CustomField = apps.get_model('extras', 'CustomField')
     Group = apps.get_model('users', 'Group')
+    db_alias = schema_editor.connection.alias

-    if old_ct := ContentType.objects.filter(app_label='users', model='netboxgroup').first():
+    if old_ct := ContentType.objects.using(db_alias).filter(app_label='users', model='netboxgroup').first():
         new_ct = ContentType.objects.get_for_model(Group)
-        CustomField.objects.filter(related_object_type=old_ct).update(related_object_type=new_ct)
+        CustomField.objects.using(db_alias).filter(related_object_type=old_ct).update(related_object_type=new_ct)

 class Migration(migrations.Migration):

@@ -1,16 +1,15 @@
 # Generated by Django 5.0.5 on 2024-05-15 18:05
 from django.db import migrations, models

 def update_content_types(apps, schema_editor):
     ObjectType = apps.get_model('core', 'ObjectType')
     ObjectPermission = apps.get_model('users', 'ObjectPermission')
+    db_alias = schema_editor.connection.alias

-    auth_group_ct = ObjectType.objects.filter(app_label='auth', model='group').first()
-    users_group_ct = ObjectType.objects.filter(app_label='users', model='group').first()
+    auth_group_ct = ObjectType.objects.using(db_alias).filter(app_label='auth', model='group').first()
+    users_group_ct = ObjectType.objects.using(db_alias).filter(app_label='users', model='group').first()
     if auth_group_ct and users_group_ct:
-        perms = ObjectPermission.objects.filter(object_types__in=[auth_group_ct])
+        perms = ObjectPermission.objects.using(db_alias).filter(object_types__in=[auth_group_ct])
         for perm in perms:
             perm.object_types.remove(auth_group_ct)
             perm.object_types.add(users_group_ct)

@@ -77,6 +77,7 @@ class UserView(generic.ObjectView):
     def get_extra_context(self, request, instance):
         changelog = ObjectChange.objects.restrict(request.user, 'view').filter(user=instance)[:20]
         changelog_table = ObjectChangeTable(changelog)
+        changelog_table.configure(request)

         return {
             'changelog_table': changelog_table,
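The added configure() call is the same change applied to the virtualization views later in this diff: it lets the table pick up the requesting user's table configuration (columns, ordering, pagination) before the template renders it. A hypothetical view using the same pattern (MyObject and MyObjectTable are invented names, not part of this changeset):

# Sketch of the table-configuration pattern shown above, under assumed names.
class MyObjectView(generic.ObjectView):
    queryset = MyObject.objects.all()

    def get_extra_context(self, request, instance):
        children = MyObject.objects.restrict(request.user, 'view').filter(parent=instance)
        table = MyObjectTable(children, orderable=False)
        table.configure(request)  # apply the user's table preferences before rendering
        return {'children_table': table}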

@@ -4,18 +4,19 @@ from netbox.settings import DISK_BASE_UNIT
 def convert_disk_size(apps, schema_editor):
-    VirtualMachine = apps.get_model('virtualization', 'VirtualMachine')
-    VirtualMachine.objects.filter(disk__isnull=False).update(disk=F('disk') * DISK_BASE_UNIT)
     VirtualDisk = apps.get_model('virtualization', 'VirtualDisk')
-    VirtualDisk.objects.filter(size__isnull=False).update(size=F('size') * DISK_BASE_UNIT)
+    VirtualMachine = apps.get_model('virtualization', 'VirtualMachine')
+    db_alias = schema_editor.connection.alias
+    VirtualMachine.objects.using(db_alias).filter(disk__isnull=False).update(disk=F('disk') * DISK_BASE_UNIT)
+    VirtualDisk.objects.using(db_alias).filter(size__isnull=False).update(size=F('size') * DISK_BASE_UNIT)

     # Recalculate disk size on all VMs with virtual disks
-    id_list = VirtualDisk.objects.values_list('virtual_machine_id').distinct()
-    virtual_machines = VirtualMachine.objects.filter(id__in=id_list)
+    id_list = VirtualDisk.objects.using(db_alias).values_list('virtual_machine_id').distinct()
+    virtual_machines = VirtualMachine.objects.using(db_alias).filter(id__in=id_list)
     for vm in virtual_machines:
         vm.disk = vm.virtualdisks.aggregate(Sum('size', default=0))['size__sum']
-    VirtualMachine.objects.bulk_update(virtual_machines, fields=['disk'])
+    VirtualMachine.objects.using(db_alias).bulk_update(virtual_machines, fields=['disk'])

 class Migration(migrations.Migration):

@@ -6,8 +6,9 @@ def set_null_values(apps, schema_editor):
     Replace empty strings with null values.
     """
     VMInterface = apps.get_model('virtualization', 'VMInterface')
+    db_alias = schema_editor.connection.alias
-    VMInterface.objects.filter(mode='').update(mode=None)
+    VMInterface.objects.using(db_alias).filter(mode='').update(mode=None)

 class Migration(migrations.Migration):

@@ -9,9 +9,11 @@ def copy_site_assignments(apps, schema_editor):
     ContentType = apps.get_model('contenttypes', 'ContentType')
     Cluster = apps.get_model('virtualization', 'Cluster')
     Site = apps.get_model('dcim', 'Site')
+    db_alias = schema_editor.connection.alias

-    Cluster.objects.filter(site__isnull=False).update(
-        scope_type=ContentType.objects.get_for_model(Site), scope_id=models.F('site_id')
+    Cluster.objects.using(db_alias).filter(site__isnull=False).update(
+        scope_type=ContentType.objects.get_for_model(Site),
+        scope_id=models.F('site_id')
     )

@@ -7,15 +7,16 @@ def populate_denormalized_fields(apps, schema_editor):
     Copy the denormalized fields for _region, _site_group and _site from existing site field.
     """
     Cluster = apps.get_model('virtualization', 'Cluster')
+    db_alias = schema_editor.connection.alias

-    clusters = Cluster.objects.filter(site__isnull=False).prefetch_related('site')
+    clusters = Cluster.objects.using(db_alias).filter(site__isnull=False).prefetch_related('site')
     for cluster in clusters:
         cluster._region_id = cluster.site.region_id
         cluster._site_group_id = cluster.site.group_id
         cluster._site_id = cluster.site_id
         # Note: Location cannot be set prior to migration

-    Cluster.objects.bulk_update(clusters, ['_region', '_site_group', '_site'], batch_size=100)
+    Cluster.objects.using(db_alias).bulk_update(clusters, ['_region', '_site_group', '_site'], batch_size=100)

 class Migration(migrations.Migration):

@@ -6,19 +6,24 @@ def populate_mac_addresses(apps, schema_editor):
     ContentType = apps.get_model('contenttypes', 'ContentType')
     VMInterface = apps.get_model('virtualization', 'VMInterface')
     MACAddress = apps.get_model('dcim', 'MACAddress')
+    db_alias = schema_editor.connection.alias
     vminterface_ct = ContentType.objects.get_for_model(VMInterface)

     mac_addresses = [
         MACAddress(
-            mac_address=vminterface.mac_address, assigned_object_type=vminterface_ct, assigned_object_id=vminterface.pk
+            mac_address=vminterface.mac_address,
+            assigned_object_type=vminterface_ct,
+            assigned_object_id=vminterface.pk
         )
-        for vminterface in VMInterface.objects.filter(mac_address__isnull=False)
+        for vminterface in VMInterface.objects.using(db_alias).filter(mac_address__isnull=False)
     ]
-    MACAddress.objects.bulk_create(mac_addresses, batch_size=100)
+    MACAddress.objects.using(db_alias).bulk_create(mac_addresses, batch_size=100)

     # TODO: Optimize interface updates
     for mac_address in mac_addresses:
-        VMInterface.objects.filter(pk=mac_address.assigned_object_id).update(primary_mac_address=mac_address)
+        VMInterface.objects.using(db_alias).filter(pk=mac_address.assigned_object_id).update(
+            primary_mac_address=mac_address
+        )

 class Migration(migrations.Migration):
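The TODO in this hunk flags that interfaces are updated with one query per MAC address. If that were ever batched, one possible approach (a sketch only, not part of this changeset) is to set primary_mac_address in memory and issue a single bulk_update:

# Sketch: batch the per-interface updates flagged by the TODO above.
interfaces = {
    iface.pk: iface
    for iface in VMInterface.objects.using(db_alias).filter(mac_address__isnull=False)
}
for mac_address in mac_addresses:
    interfaces[mac_address.assigned_object_id].primary_mac_address = mac_address
VMInterface.objects.using(db_alias).bulk_update(
    list(interfaces.values()), ['primary_mac_address'], batch_size=100
)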

@@ -350,6 +350,7 @@ class ClusterRemoveDevicesView(generic.ObjectEditView):
             selected_objects = Device.objects.filter(pk__in=form.initial['pk'])
             device_table = DeviceTable(list(selected_objects), orderable=False)
+            device_table.configure(request)

         return render(request, self.template_name, {
             'form': form,
@@ -505,6 +506,7 @@ class VMInterfaceView(generic.ObjectView):
             exclude=('virtual_machine',),
             orderable=False
         )
+        child_interfaces_tables.configure(request)

         # Get VLAN translation rules
         vlan_translation_table = None
@@ -513,6 +515,7 @@ class VMInterfaceView(generic.ObjectView):
                 data=instance.vlan_translation_policy.rules.all(),
                 orderable=False
             )
+            vlan_translation_table.configure(request)

         # Get assigned VLANs and annotate whether each is tagged or untagged
         vlans = []
@@ -527,6 +530,7 @@ class VMInterfaceView(generic.ObjectView):
             data=vlans,
             orderable=False
         )
+        vlan_table.configure(request)

         return {
             'child_interfaces_table': child_interfaces_tables,

Some files were not shown because too many files have changed in this diff.