Mirror of https://github.com/netbox-community/netbox.git (synced 2026-02-09 18:37:42 +01:00)

Compare commits: 23 commits, 20490-rest...21240-redi
Commits in this comparison (SHA1): a3be2b6d54, 97847662ac, c23b656c26, 6062aa71b1, ee6cbdcefe, de1c5120dd, 87d2e02c85, cbbc4f74b8, be5bd74d4e, cf12bb5bf5, c060eef1d8, 96f0debe6e, b26c7f34cd, d6428c6aa4, e3eca98897, cdc735fe41, aa4a9da955, 5c6fc2fb6f, ad29cb2d66, bec5ecf6a9, c98f55dbd2, dfe20532a1, 43ae52089f
@@ -15,7 +15,7 @@ body:
    attributes:
      label: NetBox version
      description: What version of NetBox are you currently running?
      placeholder: v4.5.1
      placeholder: v4.5.2
    validations:
      required: true
  - type: dropdown
.github/ISSUE_TEMPLATE/02-bug_report.yaml (vendored, 2 changes)
@@ -27,7 +27,7 @@ body:
    attributes:
      label: NetBox Version
      description: What version of NetBox are you currently running?
      placeholder: v4.5.1
      placeholder: v4.5.2
    validations:
      required: true
  - type: dropdown
.github/ISSUE_TEMPLATE/03-performance.yaml (vendored, 2 changes)
@@ -8,7 +8,7 @@ body:
    attributes:
      label: NetBox Version
      description: What version of NetBox are you currently running?
      placeholder: v4.5.1
      placeholder: v4.5.2
    validations:
      required: true
  - type: dropdown
@@ -85,7 +85,7 @@ drf-spectacular-sidecar
feedparser

# WSGI HTTP server
# https://docs.gunicorn.org/en/latest/news.html
# https://gunicorn.org/news/
gunicorn

# Platform-agnostic template rendering engine
contrib/openapi.json (4830 changes)
File diff suppressed because it is too large
@@ -8,7 +8,7 @@ This is a mapping of models to [custom validators](../customization/custom-valid

```python
CUSTOM_VALIDATORS = {
    "dcim.site": [
    "dcim.Site": [
        {
            "name": {
                "min_length": 5,
@@ -17,12 +17,15 @@ CUSTOM_VALIDATORS = {
        },
        "my_plugin.validators.Validator1"
    ],
    "dcim.device": [
    "dcim.Device": [
        "my_plugin.validators.Validator1"
    ]
}
```

!!! info "Case-Insensitive Model Names"
    Model identifiers are case-insensitive. Both `dcim.site` and `dcim.Site` are valid and equivalent.
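For context, strings such as `"my_plugin.validators.Validator1"` above refer to importable `CustomValidator` subclasses. The sketch below is a minimal, hypothetical example of such a class; the plugin path and the rule it enforces are invented for illustration.

```python
# my_plugin/validators.py -- hypothetical example
from extras.validators import CustomValidator


class Validator1(CustomValidator):
    """Reject names with leading or trailing whitespace."""

    def validate(self, instance, request):
        if instance.name != instance.name.strip():
            # fail() raises a ValidationError, optionally tied to a specific field
            self.fail("Name may not begin or end with whitespace.", field='name')
```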

---

## FIELD_CHOICES
@@ -53,6 +56,9 @@ FIELD_CHOICES = {
}
```

!!! info "Case-Insensitive Field Identifiers"
    Field identifiers are case-insensitive. Both `dcim.Site.status` and `dcim.site.status` are valid and equivalent.

The following model fields support configurable choices:

* `circuits.Circuit.status`
@@ -98,7 +104,7 @@ This is a mapping of models to [custom validators](../customization/custom-valid

```python
PROTECTION_RULES = {
    "dcim.site": [
    "dcim.Site": [
        {
            "status": {
                "eq": "decommissioning"
@@ -108,3 +114,6 @@ PROTECTION_RULES = {
    ]
}
```

!!! info "Case-Insensitive Model Names"
    Model identifiers are case-insensitive. Both `dcim.site` and `dcim.Site` are valid and equivalent.
@@ -144,7 +144,7 @@ Then, compile these portable (`.po`) files for use in the application:

* Update the version number and published date in `netbox/release.yaml`. Add or remove the designation (e.g. `beta1`) if applicable.
* Copy the version number from `release.yaml` to `pyproject.toml` in the project root.
* Update the example version numbers in the feature request and bug report templates under `.github/ISSUE_TEMPLATES/`.
* Update the example version numbers in the feature request, bug report, and performance templates under `.github/ISSUE_TEMPLATES/`.
* Add a section for this release at the top of the changelog page for the minor version (e.g. `docs/release-notes/version-4.2.md`) listing all relevant changes made in this release.

!!! tip
@@ -133,23 +133,67 @@ The field "class_type" is an easy way to distinguish what type of object it is w

## Pagination

Queries can be paginated by specifying pagination in the query and supplying an offset and optionaly a limit in the query. If no limit is given, a default of 100 is used. Queries are not paginated unless requested in the query. An example paginated query is shown below:
The GraphQL API supports two types of pagination. Offset-based pagination operates using an offset relative to the first record in a set, specified by the `offset` parameter. For example, the response to a request specifying an offset of 100 will contain the 101st and later matching records. Offset-based pagination feels very natural, but its performance can suffer when dealing with large data sets due to the overhead involved in calculating the relative offset.

The alternative approach is cursor-based pagination, which operates using absolute (rather than relative) primary key values. (These are the numeric IDs assigned to each object in the database.) When using cursor-based pagination, the response will contain records with a primary key greater than or equal to the specified start value, up to the maximum number of results. This strategy requires keeping track of the last seen primary key from each response when paginating through data, but is extremely performant. The cursor is specified by passing the starting object ID via the `start` parameter.

To ensure consistent ordering, objects will always be ordered by their primary keys when cursor-based pagination is used.

!!! note "Cursor-based pagination was introduced in NetBox v4.5.2."

Both pagination strategies support passing an optional `limit` parameter. In both approaches, this specifies the maximum number of objects to include in the response. If no limit is specified, a default value of 100 is used.

### Offset Pagination

The first page will have an `offset` of zero, or the `offset` parameter will be omitted:

```
query {
  device_list(pagination: { offset: 0, limit: 20 }) {
  device_list(pagination: {offset: 0, limit: 20}) {
    id
  }
}
```

The second page will have an offset equal to the size of the first page. If the number of records is less than the specified limit, there are no more records to process. For example, if a request specifies a `limit` of 20 but returns only 13 records, we can conclude that this is the final page of records.

```
query {
  device_list(pagination: {offset: 20, limit: 20}) {
    id
  }
}
```

### Cursor Pagination

Set the `start` value to zero to fetch the first page. Note that if the `start` parameter is omitted, offset-based pagination will be used by default.

```
query {
  device_list(pagination: {start: 0, limit: 20}) {
    id
  }
}
```

To determine the `start` value for the next page, add 1 to the primary key (`id`) of the last record in the previous page.

For example, if the ID of the last record in the previous response was 123, we would specify a `start` value of 124:

```
query {
  device_list(pagination: {start: 124, limit: 20}) {
    id
  }
}
```

This will return up to 20 records with an ID greater than or equal to 124.
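As a practical illustration, the following Python sketch walks an entire result set using cursor-based pagination from a client. The URL and token are placeholders, the query mirrors the examples above, and error handling is omitted.

```python
import requests

GRAPHQL_URL = "https://netbox.example.com/graphql/"  # placeholder
HEADERS = {"Authorization": "Token $TOKEN"}           # placeholder token


def fetch_all_devices(limit=20):
    """Collect every device by following the `start` cursor until a short page is returned."""
    start, devices = 0, []
    while True:
        query = "query { device_list(pagination: {start: %d, limit: %d}) { id name } }" % (start, limit)
        response = requests.post(GRAPHQL_URL, json={"query": query}, headers=HEADERS)
        page = response.json()["data"]["device_list"]
        devices.extend(page)
        if len(page) < limit:
            break  # fewer records than the limit means this was the final page
        start = int(page[-1]["id"]) + 1  # next cursor: last primary key + 1
    return devices
```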

## Authentication

NetBox's GraphQL API uses the same API authentication tokens as its REST API. Authentication tokens are included with requests by attaching an `Authorization` HTTP header in the following form:

```
Authorization: Token $TOKEN
```
NetBox's GraphQL API uses the same API authentication tokens as its REST API. See the [REST API authentication](./rest-api.md#authentication) documentation for further detail.

## Disabling the GraphQL API
@@ -1,5 +1,53 @@
# NetBox v4.5

## v4.5.2 (2026-02-03)

### Enhancements

* [#15801](https://github.com/netbox-community/netbox/issues/15801) - Add link peer and connection columns to the VLAN device interfaces table
* [#19221](https://github.com/netbox-community/netbox/issues/19221) - Truncate long image attachment filenames in the UI
* [#19869](https://github.com/netbox-community/netbox/issues/19869) - Display peer connections for LAG member interfaces
* [#20052](https://github.com/netbox-community/netbox/issues/20052) - Increase logging level of error message when a custom script fails to load
* [#20172](https://github.com/netbox-community/netbox/issues/20172) - Add `cabled` filter for interfaces in GraphQL API
* [#21081](https://github.com/netbox-community/netbox/issues/21081) - Add owner group table columns & filters across all supported object list views
* [#21088](https://github.com/netbox-community/netbox/issues/21088) - Add max depth and max length dropdowns for child prefix views
* [#21110](https://github.com/netbox-community/netbox/issues/21110) - Support cursor-based pagination in GraphQL API
* [#21201](https://github.com/netbox-community/netbox/issues/21201) - Pre-populate GenericForeignKey form fields when cloning
* [#21209](https://github.com/netbox-community/netbox/issues/21209) - Ignore case sensitivity for configuration parameters which specify an app label and model name
* [#21228](https://github.com/netbox-community/netbox/issues/21228) - Support image attachments for rack types
* [#21244](https://github.com/netbox-community/netbox/issues/21244) - Enable omitting specific fields from REST API responses with `?omit=` parameter

### Performance Improvements

* [#21249](https://github.com/netbox-community/netbox/issues/21249) - Avoid extraneous user query when no event rules are present
* [#21259](https://github.com/netbox-community/netbox/issues/21259) - Cache ObjectType lookups for the duration of a request
* [#21260](https://github.com/netbox-community/netbox/issues/21260) - Defer object serialization for events pipeline processing
* [#21263](https://github.com/netbox-community/netbox/issues/21263) - Prefetch related objects after creating/updating objects via REST API
* [#21300](https://github.com/netbox-community/netbox/issues/21300) - Cache custom field lookups for the duration of a request
* [#21302](https://github.com/netbox-community/netbox/issues/21302) - Avoid redundant uniqueness checks in ValidatedModelSerializer
* [#21303](https://github.com/netbox-community/netbox/issues/21303) - Cache post-change snapshot on each instance after serialization
* [#21327](https://github.com/netbox-community/netbox/issues/21327) - Always leverage `get_by_natural_key()` to resolve ContentTypes

### Bug Fixes

* [#20212](https://github.com/netbox-community/netbox/issues/20212) - Fix support for image attachment thumbnails when using S3 storage
* [#20383](https://github.com/netbox-community/netbox/issues/20383) - When editing a device, clearing the assigned unit should also clear the rack face selection
* [#20902](https://github.com/netbox-community/netbox/issues/20902) - Avoid `SyncError` exception when Git URL contains an embedded username
* [#20977](https://github.com/netbox-community/netbox/issues/20977) - "Run again" button should respect script variable defaults
* [#21115](https://github.com/netbox-community/netbox/issues/21115) - Include `attribute_data` in ModuleType YAML export
* [#21129](https://github.com/netbox-community/netbox/issues/21129) - Store queue name on the Job model to ensure deletion of associated RQ task when a non-default queue is used
* [#21168](https://github.com/netbox-community/netbox/issues/21168) - Fix Application Service cloning to preserve parent object
* [#21173](https://github.com/netbox-community/netbox/issues/21173) - Ensure all plugin menu items are registered regardless of initialization order
* [#21176](https://github.com/netbox-community/netbox/issues/21176) - Remove checkboxes from IP ranges in mixed-type tables
* [#21202](https://github.com/netbox-community/netbox/issues/21202) - Fix scoped form cloning clearing the `scope` field when `scope_type` changes
* [#21214](https://github.com/netbox-community/netbox/issues/21214) - Clean up AutoSyncRecord when detaching from DataSource
* [#21242](https://github.com/netbox-community/netbox/issues/21242) - Navigation menu items for authentication should not require `staff_only` permission
* [#21254](https://github.com/netbox-community/netbox/issues/21254) - Fix `AttributeError` exception when checking for latest release
* [#21262](https://github.com/netbox-community/netbox/issues/21262) - Assigned scope should be replicated when cloning a prefix
* [#21269](https://github.com/netbox-community/netbox/issues/21269) - Fix replication of front/rear port assignments from the module type when installing a module

---

## v4.5.1 (2026-01-20)

### Enhancements
@@ -21,11 +21,24 @@ __all__ = (
    'GitBackend',
    'LocalBackend',
    'S3Backend',
    'url_has_embedded_credentials',
)

logger = logging.getLogger('netbox.data_backends')


def url_has_embedded_credentials(url):
    """
    Check if a URL contains embedded credentials (username in the URL).

    URLs like 'https://user@bitbucket.org/...' have embedded credentials.
    This is used to avoid passing explicit credentials to dulwich when the
    URL already contains them, which would cause authentication conflicts.
    """
    parsed = urlparse(url)
    return bool(parsed.username)
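For reference, the check above relies on the standard library's `urlparse()`, which exposes any username embedded in the URL's network location:

```python
from urllib.parse import urlparse

urlparse('https://myuser@bitbucket.org/workspace/repo.git').username  # 'myuser'
urlparse('https://bitbucket.org/workspace/repo.git').username         # None
```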

@register_data_backend()
class LocalBackend(DataBackend):
    name = 'local'
@@ -102,7 +115,9 @@ class GitBackend(DataBackend):
            clone_args['pool_manager'] = ProxyPoolManager(self.socks_proxy)

        if self.url_scheme in ('http', 'https'):
            if self.params.get('username'):
            # Only pass explicit credentials if URL doesn't already contain embedded username
            # to avoid credential conflicts (see #20902)
            if not url_has_embedded_credentials(self.url) and self.params.get('username'):
                clone_args.update(
                    {
                        "username": self.params.get('username'),
@@ -18,6 +18,7 @@ from extras.events import enqueue_event
from extras.models import Tag
from extras.utils import run_validators
from netbox.config import get_config
from utilities.data import get_config_value_ci
from netbox.context import current_request, events_queue
from netbox.models.features import ChangeLoggingMixin, get_model_features, model_is_public
from utilities.exceptions import AbortRequest
@@ -168,7 +169,7 @@ def handle_deleted_object(sender, instance, **kwargs):
    # to queueing any events for the object being deleted, in case a validation error is
    # raised, causing the deletion to fail.
    model_name = f'{sender._meta.app_label}.{sender._meta.model_name}'
    validators = get_config().PROTECTION_RULES.get(model_name, [])
    validators = get_config_value_ci(get_config().PROTECTION_RULES, model_name, default=[])
    try:
        run_validators(instance, validators)
    except ValidationError as e:
|
||||
|
||||
116
netbox/core/tests/test_data_backends.py
Normal file
116
netbox/core/tests/test_data_backends.py
Normal file
@@ -0,0 +1,116 @@
|
||||
from unittest import skipIf
|
||||
from unittest.mock import patch
|
||||
|
||||
from django.test import TestCase
|
||||
|
||||
from core.data_backends import url_has_embedded_credentials
|
||||
|
||||
try:
|
||||
import dulwich # noqa: F401
|
||||
DULWICH_AVAILABLE = True
|
||||
except ImportError:
|
||||
DULWICH_AVAILABLE = False
|
||||
|
||||
|
||||
class URLEmbeddedCredentialsTests(TestCase):
|
||||
def test_url_with_embedded_username(self):
|
||||
self.assertTrue(url_has_embedded_credentials('https://myuser@bitbucket.org/workspace/repo.git'))
|
||||
|
||||
def test_url_without_embedded_username(self):
|
||||
self.assertFalse(url_has_embedded_credentials('https://bitbucket.org/workspace/repo.git'))
|
||||
|
||||
def test_url_with_username_and_password(self):
|
||||
self.assertTrue(url_has_embedded_credentials('https://user:pass@bitbucket.org/workspace/repo.git'))
|
||||
|
||||
def test_various_providers_with_embedded_username(self):
|
||||
urls = [
|
||||
'https://user@bitbucket.org/workspace/repo.git',
|
||||
'https://user@github.com/owner/repo.git',
|
||||
'https://deploy-key@gitlab.com/group/project.git',
|
||||
'http://user@internal-git.example.com/repo.git',
|
||||
]
|
||||
for url in urls:
|
||||
with self.subTest(url=url):
|
||||
self.assertTrue(url_has_embedded_credentials(url))
|
||||
|
||||
def test_various_providers_without_embedded_username(self):
|
||||
"""Various Git providers without embedded usernames."""
|
||||
urls = [
|
||||
'https://bitbucket.org/workspace/repo.git',
|
||||
'https://github.com/owner/repo.git',
|
||||
'https://gitlab.com/group/project.git',
|
||||
'http://internal-git.example.com/repo.git',
|
||||
]
|
||||
for url in urls:
|
||||
with self.subTest(url=url):
|
||||
self.assertFalse(url_has_embedded_credentials(url))
|
||||
|
||||
def test_ssh_url(self):
|
||||
# git@host:path format doesn't parse as having a username in the traditional sense
|
||||
self.assertFalse(url_has_embedded_credentials('git@github.com:owner/repo.git'))
|
||||
|
||||
def test_file_url(self):
|
||||
self.assertFalse(url_has_embedded_credentials('file:///path/to/repo'))
|
||||
|
||||
|
||||
@skipIf(not DULWICH_AVAILABLE, "dulwich is not installed")
|
||||
class GitBackendCredentialIntegrationTests(TestCase):
|
||||
"""
|
||||
Integration tests that verify GitBackend correctly applies credential logic.
|
||||
|
||||
These tests require dulwich to be installed and verify the full integration
|
||||
of the credential handling in GitBackend.fetch().
|
||||
"""
|
||||
|
||||
def _get_clone_kwargs(self, url, **params):
|
||||
from core.data_backends import GitBackend
|
||||
|
||||
backend = GitBackend(url=url, **params)
|
||||
|
||||
with patch('dulwich.porcelain.clone') as mock_clone, \
|
||||
patch('dulwich.porcelain.NoneStream'):
|
||||
try:
|
||||
with backend.fetch():
|
||||
pass
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
if mock_clone.called:
|
||||
return mock_clone.call_args.kwargs
|
||||
return {}
|
||||
|
||||
def test_url_with_embedded_username_skips_explicit_credentials(self):
|
||||
kwargs = self._get_clone_kwargs(
|
||||
url='https://myuser@bitbucket.org/workspace/repo.git',
|
||||
username='myuser',
|
||||
password='my-api-key'
|
||||
)
|
||||
|
||||
self.assertEqual(kwargs.get('username'), None)
|
||||
self.assertEqual(kwargs.get('password'), None)
|
||||
|
||||
def test_url_without_embedded_username_passes_explicit_credentials(self):
|
||||
kwargs = self._get_clone_kwargs(
|
||||
url='https://bitbucket.org/workspace/repo.git',
|
||||
username='myuser',
|
||||
password='my-api-key'
|
||||
)
|
||||
|
||||
self.assertEqual(kwargs.get('username'), 'myuser')
|
||||
self.assertEqual(kwargs.get('password'), 'my-api-key')
|
||||
|
||||
def test_url_with_embedded_username_no_explicit_credentials(self):
|
||||
kwargs = self._get_clone_kwargs(
|
||||
url='https://myuser@bitbucket.org/workspace/repo.git'
|
||||
)
|
||||
|
||||
self.assertEqual(kwargs.get('username'), None)
|
||||
self.assertEqual(kwargs.get('password'), None)
|
||||
|
||||
def test_public_repo_no_credentials(self):
|
||||
kwargs = self._get_clone_kwargs(
|
||||
url='https://github.com/public/repo.git'
|
||||
)
|
||||
|
||||
self.assertEqual(kwargs.get('username'), None)
|
||||
self.assertEqual(kwargs.get('password'), None)
|
||||
@@ -373,7 +373,7 @@ class Rack(ContactsMixin, ImageAttachmentsMixin, TrackingModelMixin, RackBase):
        super().clean()

        # Validate location/site assignment
        if self.site and self.location and self.location.site != self.site:
        if self.site_id and self.location_id and self.location.site_id != self.site_id:
            raise ValidationError(_("Assigned location must belong to parent site ({site}).").format(site=self.site))

        # Validate outer dimensions and unit
@@ -4,7 +4,6 @@ from drf_spectacular.utils import extend_schema_field
from rest_framework.fields import Field
from rest_framework.serializers import ValidationError

from core.models import ObjectType
from extras.choices import CustomFieldTypeChoices
from extras.constants import CUSTOMFIELD_EMPTY_VALUES
from extras.models import CustomField
@@ -24,13 +23,9 @@ class CustomFieldDefaultValues:
    def __call__(self, serializer_field):
        self.model = serializer_field.parent.Meta.model

        # Retrieve the CustomFields for the parent model
        object_type = ObjectType.objects.get_for_model(self.model)
        fields = CustomField.objects.filter(object_types=object_type)

        # Populate the default value for each CustomField
        # Populate the default value for each CustomField on the model
        value = {}
        for field in fields:
        for field in CustomField.objects.get_for_model(self.model):
            if field.default is not None:
                value[field.name] = field.default
            else:
@@ -47,8 +42,7 @@ class CustomFieldsDataField(Field):
        Cache CustomFields assigned to this model to avoid redundant database queries
        """
        if not hasattr(self, '_custom_fields'):
            object_type = ObjectType.objects.get_for_model(self.parent.Meta.model)
            self._custom_fields = CustomField.objects.filter(object_types=object_type)
            self._custom_fields = CustomField.objects.get_for_model(self.parent.Meta.model)
        return self._custom_fields

    def to_representation(self, obj):
@@ -75,10 +75,11 @@ def get_bookmarks_object_type_choices():
def get_models_from_content_types(content_types):
    """
    Return a list of models corresponding to the given content types, identified by natural key.
    Accepts both lowercase (e.g. "dcim.site") and PascalCase (e.g. "dcim.Site") model names.
    """
    models = []
    for content_type_id in content_types:
        app_label, model_name = content_type_id.split('.')
        app_label, model_name = content_type_id.lower().split('.')
        try:
            content_type = ObjectType.objects.get_by_natural_key(app_label, model_name)
            if content_type.model_class():
@@ -51,18 +51,26 @@ def serialize_for_event(instance):


def get_snapshots(instance, event_type):
    snapshots = {
        'prechange': getattr(instance, '_prechange_snapshot', None),
        'postchange': None,
    }
    if event_type != OBJECT_DELETED:
        # Use model's serialize_object() method if defined; fall back to serialize_object() utility function
        if hasattr(instance, 'serialize_object'):
            snapshots['postchange'] = instance.serialize_object()
        else:
            snapshots['postchange'] = serialize_object(instance)
    """
    Return a dictionary of pre- and post-change snapshots for the given instance.
    """
    if event_type == OBJECT_DELETED:
        # Post-change snapshot must be empty for deleted objects
        postchange_snapshot = None
    elif hasattr(instance, '_postchange_snapshot'):
        # Use the cached post-change snapshot if one is available
        postchange_snapshot = instance._postchange_snapshot
    elif hasattr(instance, 'serialize_object'):
        # Use model's serialize_object() method if defined
        postchange_snapshot = instance.serialize_object()
    else:
        # Fall back to the serialize_object() utility function
        postchange_snapshot = serialize_object(instance)

    return snapshots
    return {
        'prechange': getattr(instance, '_prechange_snapshot', None),
        'postchange': postchange_snapshot,
    }


def enqueue_event(queue, instance, request, event_type):
@@ -19,6 +19,7 @@ from django.utils.translation import gettext_lazy as _
from core.models import ObjectType
from extras.choices import *
from extras.data import CHOICE_SETS
from netbox.context import query_cache
from netbox.models import ChangeLoggedModel
from netbox.models.features import CloningMixin, ExportTemplatesMixin
from netbox.models.mixins import OwnerMixin
@@ -58,8 +59,20 @@ class CustomFieldManager(models.Manager.from_queryset(RestrictedQuerySet)):
        """
        Return all CustomFields assigned to the given model.
        """
        # Check the request cache before hitting the database
        cache = query_cache.get()
        if cache is not None:
            if custom_fields := cache['custom_fields'].get(model._meta.model):
                return custom_fields

        content_type = ObjectType.objects.get_for_model(model._meta.concrete_model)
        return self.get_queryset().filter(object_types=content_type)
        custom_fields = self.get_queryset().filter(object_types=content_type)

        # Populate the request cache to avoid redundant lookups
        if cache is not None:
            cache['custom_fields'][model._meta.model] = custom_fields

        return custom_fields

    def get_defaults_for_model(self, model):
        """
@@ -9,6 +9,7 @@ from extras.models import EventRule, Notification, Subscription
from netbox.config import get_config
from netbox.models.features import has_feature
from netbox.signals import post_clean
from utilities.data import get_config_value_ci
from utilities.exceptions import AbortRequest
from .models import CustomField, TaggedItem
from .utils import run_validators
@@ -65,7 +66,7 @@ def run_save_validators(sender, instance, **kwargs):
    Run any custom validation rules for the model prior to calling save().
    """
    model_name = f'{sender._meta.app_label}.{sender._meta.model_name}'
    validators = get_config().CUSTOM_VALIDATORS.get(model_name, [])
    validators = get_config_value_ci(get_config().CUSTOM_VALIDATORS, model_name, default=[])

    run_validators(instance, validators)
@@ -87,7 +87,9 @@ class Service(ContactsMixin, ServiceBase, PrimaryModel):
        help_text=_("The specific IP addresses (if any) to which this application service is bound")
    )

    clone_fields = ['protocol', 'ports', 'description', 'parent', 'ipaddresses', ]
    clone_fields = (
        'protocol', 'ports', 'description', 'parent_object_type', 'parent_object_id', 'ipaddresses',
    )

    class Meta:
        indexes = (
@@ -1,3 +1,4 @@
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ObjectDoesNotExist
from django.db.backends.postgresql.psycopg_any import NumericRange
from django.utils.translation import gettext as _
@@ -109,7 +110,7 @@ class ContentTypeField(RelatedField):
    def to_internal_value(self, data):
        try:
            app_label, model = data.split('.')
            return self.queryset.get(app_label=app_label, model=model)
            return ContentType.objects.get_by_natural_key(app_label=app_label, model=model)
        except ObjectDoesNotExist:
            self.fail('does_not_exist', content_type=data)
        except (AttributeError, TypeError, ValueError):
@@ -112,6 +112,7 @@ class ValidatedModelSerializer(BaseModelSerializer):
        for k, v in attrs.items():
            setattr(instance, k, v)
        instance._m2m_values = m2m_values
        instance.full_clean()
        # Skip uniqueness validation of individual fields inside `full_clean()` (this is handled by the serializer)
        instance.full_clean(validate_unique=False)

        return data
@@ -170,6 +170,28 @@ class NetBoxModelViewSet(

    # Creates

    def create(self, request, *args, **kwargs):
        serializer = self.get_serializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        bulk_create = getattr(serializer, 'many', False)
        self.perform_create(serializer)

        # After creating the instance(s), re-initialize the serializer with a queryset
        # to ensure related objects are prefetched.
        if bulk_create:
            instance_pks = [obj.pk for obj in serializer.instance]
            # Order by PK to ensure that the ordering of objects in the response
            # matches the ordering of those in the request.
            qs = self.get_queryset().filter(pk__in=instance_pks).order_by('pk')
        else:
            qs = self.get_queryset().get(pk=serializer.instance.pk)

        # Re-serialize the instance(s) with prefetched data
        serializer = self.get_serializer(qs, many=bulk_create)

        headers = self.get_success_headers(serializer.data)
        return Response(serializer.data, status=status.HTTP_201_CREATED, headers=headers)

    def perform_create(self, serializer):
        model = self.queryset.model
        logger = logging.getLogger(f'netbox.api.views.{self.__class__.__name__}')
@@ -186,9 +208,20 @@ class NetBoxModelViewSet(
    # Updates

    def update(self, request, *args, **kwargs):
        # Hotwire get_object() to ensure we save a pre-change snapshot
        self.get_object = self.get_object_with_snapshot
        return super().update(request, *args, **kwargs)
        partial = kwargs.pop('partial', False)
        instance = self.get_object_with_snapshot()
        serializer = self.get_serializer(instance, data=request.data, partial=partial)
        serializer.is_valid(raise_exception=True)
        self.perform_update(serializer)

        # After updating the instance, re-initialize the serializer with a queryset
        # to ensure related objects are prefetched.
        qs = self.get_queryset().get(pk=serializer.instance.pk)

        # Re-serialize the instance(s) with prefetched data
        serializer = self.get_serializer(qs)

        return Response(serializer.data)

    def perform_update(self, serializer):
        model = self.queryset.model

@@ -108,13 +108,17 @@ class BulkUpdateModelMixin:
            obj.pop('id'): obj for obj in request.data
        }

        data = self.perform_bulk_update(qs, update_data, partial=partial)
        object_pks = self.perform_bulk_update(qs, update_data, partial=partial)

        return Response(data, status=status.HTTP_200_OK)
        # Prefetch related objects for all updated instances
        qs = self.get_queryset().filter(pk__in=object_pks)
        serializer = self.get_serializer(qs, many=True)

        return Response(serializer.data, status=status.HTTP_200_OK)

    def perform_bulk_update(self, objects, update_data, partial):
        updated_pks = []
        with transaction.atomic(using=router.db_for_write(self.queryset.model)):
            data_list = []
            for obj in objects:
                data = update_data.get(obj.id)
                if hasattr(obj, 'snapshot'):
@@ -122,9 +126,9 @@ class BulkUpdateModelMixin:
                serializer = self.get_serializer(obj, data=data, partial=partial)
                serializer.is_valid(raise_exception=True)
                self.perform_update(serializer)
                data_list.append(serializer.data)
                updated_pks.append(obj.pk)

        return data_list
        return updated_pks

    def bulk_partial_update(self, request, *args, **kwargs):
        kwargs['partial'] = True
@@ -43,6 +43,16 @@ REDIS = {
        # 'INSECURE_SKIP_TLS_VERIFY': False,
        # Set a path to a certificate authority, typically used with a self signed certificate.
        # 'CA_CERT_PATH': '/etc/ssl/certs/ca.crt',
        # Advanced Redis client parameters (SSL/TLS, timeouts, etc.)
        # Passed directly to redis-py. See: https://redis-py.readthedocs.io/en/stable/connections.html
        # NOTE: The CA_CERT_PATH setting above is already mapped to 'ssl_ca_certs' in KWARGS.
        # Only override these parameters in KWARGS if you have a specific reason to do so.
        # 'KWARGS': {
        #     'ssl_certfile': '/path/to/client-cert.pem',
        #     'ssl_keyfile': '/path/to/client-key.pem',
        #     'ssl_min_version': ssl.TLSVersion.TLSv1_2,
        #     'ssl_ciphers': 'HIGH:!aNULL',
        # },
    },
    'caching': {
        'HOST': 'localhost',
@@ -59,6 +69,17 @@ REDIS = {
        # 'INSECURE_SKIP_TLS_VERIFY': False,
        # Set a path to a certificate authority, typically used with a self signed certificate.
        # 'CA_CERT_PATH': '/etc/ssl/certs/ca.crt',
        # Advanced Redis client parameters (SSL/TLS, timeouts, etc.)
        # Passed directly to Redis connection pool. See: https://github.com/jazzband/django-redis#configure-as-cache-backend
        # NOTE: The INSECURE_SKIP_TLS_VERIFY setting above is already mapped to 'ssl_cert_reqs' and
        # CA_CERT_PATH is mapped to 'ssl_ca_certs' in KWARGS. Only override these parameters
        # in KWARGS if you have a specific reason to do so.
        # 'KWARGS': {
        #     'ssl_certfile': '/path/to/client-cert.pem',
        #     'ssl_keyfile': '/path/to/client-key.pem',
        #     'ssl_min_version': ssl.TLSVersion.TLSv1_2,
        #     'ssl_ciphers': 'HIGH:!aNULL',
        # },
    }
}
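One practical note on the example above: the commented `ssl_min_version` value references the `ssl` module, so a `configuration.py` that uncomments it must import `ssl` itself. A minimal sketch (host values are placeholders):

```python
import ssl  # required only if KWARGS reference ssl.TLSVersion

REDIS = {
    'tasks': {
        'HOST': 'redis.example.com',
        'PORT': 6379,
        'DATABASE': 0,
        'SSL': True,
        'KWARGS': {
            'ssl_min_version': ssl.TLSVersion.TLSv1_2,
        },
    },
    'caching': {
        'HOST': 'redis.example.com',
        'PORT': 6379,
        'DATABASE': 1,
        'SSL': True,
        'KWARGS': {
            'ssl_min_version': ssl.TLSVersion.TLSv1_2,
        },
    },
}
```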
@@ -305,18 +305,13 @@ class NetBoxModelFilterSet(ChangeLoggedModelFilterSet):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        # Dynamically add a Filter for each CustomField applicable to the parent model
        custom_fields = CustomField.objects.filter(
            object_types=ContentType.objects.get_for_model(self._meta.model)
        ).exclude(
            filter_logic=CustomFieldFilterLogicChoices.FILTER_DISABLED
        )

        custom_field_filters = {}
        for custom_field in custom_fields:
            filter_name = f'cf_{custom_field.name}'
            filter_instance = custom_field.to_filter()
            if filter_instance:
        for custom_field in CustomField.objects.get_for_model(self._meta.model):
            if custom_field.filter_logic == CustomFieldFilterLogicChoices.FILTER_DISABLED:
                # Skip disabled fields
                continue
            if filter_instance := custom_field.to_filter():
                filter_name = f'cf_{custom_field.name}'
                custom_field_filters[filter_name] = filter_instance

        # Add relevant additional lookups
@@ -31,10 +31,11 @@ class NetBoxModelImportForm(CSVModelForm, NetBoxModelForm):
    )

    def _get_custom_fields(self, content_type):
        return CustomField.objects.filter(
            object_types=content_type,
            ui_editable=CustomFieldUIEditableChoices.YES
        )
        # Return only custom fields that are editable in the UI
        return [
            cf for cf in CustomField.objects.get_for_model(content_type.model_class())
            if cf.ui_editable == CustomFieldUIEditableChoices.YES
        ]

    def _get_form_field(self, customfield):
        return customfield.to_form_field(for_csv_import=True)
@@ -1,5 +1,4 @@
from django import forms
from django.db.models import Q
from django.utils.translation import gettext_lazy as _

from extras.choices import *
@@ -35,10 +34,13 @@ class NetBoxModelFilterSetForm(FilterModifierMixin, CustomFieldsMixin, SavedFilt
    selector_fields = ('filter_id', 'q')

    def _get_custom_fields(self, content_type):
        return super()._get_custom_fields(content_type).exclude(
            Q(filter_logic=CustomFieldFilterLogicChoices.FILTER_DISABLED) |
            Q(type=CustomFieldTypeChoices.TYPE_JSON)
        )
        # Return only non-hidden custom fields for which filtering is enabled (excluding JSON fields)
        return [
            cf for cf in super()._get_custom_fields(content_type) if (
                cf.filter_logic != CustomFieldFilterLogicChoices.FILTER_DISABLED and
                cf.type != CustomFieldTypeChoices.TYPE_JSON
            )
        ]

    def _get_form_field(self, customfield):
        return customfield.to_form_field(
@@ -65,9 +65,11 @@ class CustomFieldsMixin:
        return ObjectType.objects.get_for_model(self.model)

    def _get_custom_fields(self, content_type):
        return CustomField.objects.filter(object_types=content_type).exclude(
            ui_editable=CustomFieldUIEditableChoices.HIDDEN
        )
        # Return only custom fields that are not hidden from the UI
        return [
            cf for cf in CustomField.objects.get_for_model(content_type.model_class())
            if cf.ui_editable != CustomFieldUIEditableChoices.HIDDEN
        ]

    def _get_form_field(self, customfield):
        return customfield.to_form_field()
netbox/netbox/graphql/pagination.py (new file, 50 lines)
@@ -0,0 +1,50 @@
import strawberry
from strawberry.types.unset import UNSET
from strawberry_django.pagination import _QS, apply

__all__ = (
    'OffsetPaginationInfo',
    'OffsetPaginationInput',
    'apply_pagination',
)


@strawberry.type
class OffsetPaginationInfo:
    offset: int = 0
    limit: int | None = UNSET
    start: int | None = UNSET


@strawberry.input
class OffsetPaginationInput(OffsetPaginationInfo):
    """
    Customized implementation of OffsetPaginationInput to support cursor-based pagination.
    """
    pass


def apply_pagination(
    self,
    queryset: _QS,
    pagination: OffsetPaginationInput | None = None,
    *,
    related_field_id: str | None = None,
) -> _QS:
    """
    Replacement for the `apply_pagination()` method on StrawberryDjangoField to support cursor-based pagination.
    """
    if pagination is not None and pagination.start not in (None, UNSET):
        if pagination.offset:
            raise ValueError('Cannot specify both `start` and `offset` in pagination.')
        if pagination.start < 0:
            raise ValueError('`start` must be greater than or equal to zero.')

        # Filter the queryset to include only records with a primary key greater than or equal to the start value,
        # and force ordering by primary key to ensure consistent pagination across all records.
        queryset = queryset.filter(pk__gte=pagination.start).order_by('pk')

        # Ignore `offset` when `start` is set
        pagination.offset = 0

    return apply(pagination, queryset, related_field_id=related_field_id)
@@ -2,7 +2,7 @@ import json
from collections import defaultdict
from functools import cached_property

from django.contrib.contenttypes.fields import GenericRelation
from django.contrib.contenttypes.fields import GenericForeignKey, GenericRelation
from django.contrib.contenttypes.models import ContentType
from django.core.validators import ValidationError
from django.db import models
@@ -121,9 +121,11 @@ class ChangeLoggingMixin(DeleteMixin, models.Model):
        if hasattr(self, '_prechange_snapshot'):
            objectchange.prechange_data = self._prechange_snapshot
        if action in (ObjectChangeActionChoices.ACTION_CREATE, ObjectChangeActionChoices.ACTION_UPDATE):
            objectchange.postchange_data = self.serialize_object(exclude=exclude)
            self._postchange_snapshot = self.serialize_object(exclude=exclude)
            objectchange.postchange_data = self._postchange_snapshot

        return objectchange
    to_objectchange.alters_data = True


class CloningMixin(models.Model):
@@ -159,6 +161,13 @@ class CloningMixin(models.Model):
            elif field_value not in (None, ''):
                attrs[field_name] = field_value

        # Handle GenericForeignKeys. If the CT and ID fields are being cloned, also
        # include the name of the GFK attribute itself, as this is what forms expect.
        for field in self._meta.private_fields:
            if isinstance(field, GenericForeignKey):
                if field.ct_field in attrs and field.fk_field in attrs:
                    attrs[field.name] = attrs[field.fk_field]

        # Include tags (if applicable)
        if is_taggable(self):
            attrs['tags'] = [tag.pk for tag in self.tags.all()]
@@ -317,9 +326,11 @@ class CustomFieldsMixin(models.Model):
                raise ValidationError(_("Missing required custom field '{name}'.").format(name=cf.name))

    def save(self, *args, **kwargs):
        # Populate default values if omitted
        for cf in self.custom_fields.filter(default__isnull=False):
            if cf.name not in self.custom_field_data:
        from extras.models import CustomField

        # Populate default values for custom fields not already present in the object data
        for cf in CustomField.objects.get_for_model(self):
            if cf.name not in self.custom_field_data and cf.default is not None:
                self.custom_field_data[cf.name] = cf.default

        super().save(*args, **kwargs)
@@ -187,7 +187,6 @@ class CachedValueSearchBackend(SearchBackend):
        return ret

    def cache(self, instances, indexer=None, remove_existing=True):
        object_type = None
        custom_fields = None

        # Convert a single instance to an iterable
@@ -208,15 +207,18 @@ class CachedValueSearchBackend(SearchBackend):
            except KeyError:
                break

            # Prefetch any associated custom fields
            object_type = ObjectType.objects.get_for_model(indexer.model)
            custom_fields = CustomField.objects.filter(object_types=object_type).exclude(search_weight=0)
            # Prefetch any associated custom fields (excluding those with a zero search weight)
            custom_fields = [
                cf for cf in CustomField.objects.get_for_model(indexer.model)
                if cf.search_weight > 0
            ]

            # Wipe out any previously cached values for the object
            if remove_existing:
                self.remove(instance)

            # Generate cache data
            object_type = ObjectType.objects.get_for_model(indexer.model)
            for field in indexer.to_cache(instance, custom_fields=custom_fields):
                buffer.append(
                    CachedValue(
@@ -12,10 +12,13 @@ from django.core.validators import URLValidator
from django.utils.module_loading import import_string
from django.utils.translation import gettext_lazy as _
from rest_framework.utils import field_mapping
from strawberry_django import pagination
from strawberry_django.fields.field import StrawberryDjangoField

from core.exceptions import IncompatiblePluginError
from netbox.config import PARAMS as CONFIG_PARAMS
from netbox.constants import RQ_QUEUE_DEFAULT, RQ_QUEUE_HIGH, RQ_QUEUE_LOW
from netbox.graphql.pagination import OffsetPaginationInput, apply_pagination
from netbox.plugins import PluginConfig
from netbox.registry import registry
import storages.utils  # type: ignore
@@ -33,6 +36,12 @@ from .monkey import get_unique_validators
# Override DRF's get_unique_validators() function with our own (see bug #19302)
field_mapping.get_unique_validators = get_unique_validators

# Override strawberry-django's OffsetPaginationInput class to add the `start` parameter
pagination.OffsetPaginationInput = OffsetPaginationInput

# Patch StrawberryDjangoField to use our custom `apply_pagination()` method with support for cursor-based pagination
StrawberryDjangoField.apply_pagination = apply_pagination


#
# Environment setup
@@ -399,6 +408,12 @@ if CACHING_REDIS_CA_CERT_PATH:
    CACHES['default']['OPTIONS'].setdefault('CONNECTION_POOL_KWARGS', {})
    CACHES['default']['OPTIONS']['CONNECTION_POOL_KWARGS']['ssl_ca_certs'] = CACHING_REDIS_CA_CERT_PATH

# Merge in KWARGS for additional parameters
caching_redis_kwargs = REDIS['caching'].get('KWARGS')
if caching_redis_kwargs:
    CACHES['default']['OPTIONS'].setdefault('CONNECTION_POOL_KWARGS', {})
    CACHES['default']['OPTIONS']['CONNECTION_POOL_KWARGS'].update(caching_redis_kwargs)


#
# Sessions
@@ -808,6 +823,12 @@ if TASKS_REDIS_CA_CERT_PATH:
    RQ_PARAMS.setdefault('REDIS_CLIENT_KWARGS', {})
    RQ_PARAMS['REDIS_CLIENT_KWARGS']['ssl_ca_certs'] = TASKS_REDIS_CA_CERT_PATH

# Merge in KWARGS for additional parameters
tasks_redis_kwargs = TASKS_REDIS.get('KWARGS')
if tasks_redis_kwargs:
    RQ_PARAMS.setdefault('REDIS_CLIENT_KWARGS', {})
    RQ_PARAMS['REDIS_CLIENT_KWARGS'].update(tasks_redis_kwargs)

# Define named RQ queues
RQ_QUEUES = {
    RQ_QUEUE_HIGH: RQ_PARAMS,
@@ -242,14 +242,17 @@ class NetBoxTable(BaseTable):
            (name, deepcopy(column)) for name, column in registered_columns.items()
        ])

        # Add custom field & custom link columns
        object_type = ObjectType.objects.get_for_model(self._meta.model)
        custom_fields = CustomField.objects.filter(
            object_types=object_type
        ).exclude(ui_visible=CustomFieldUIVisibleChoices.HIDDEN)
        # Add columns for custom fields
        custom_fields = [
            cf for cf in CustomField.objects.get_for_model(self._meta.model)
            if cf.ui_visible != CustomFieldUIVisibleChoices.HIDDEN
        ]
        extra_columns.extend([
            (f'cf_{cf.name}', columns.CustomFieldColumn(cf)) for cf in custom_fields
        ])

        # Add columns for custom links
        object_type = ObjectType.objects.get_for_model(self._meta.model)
        custom_links = CustomLink.objects.filter(object_types=object_type, enabled=True)
        extra_columns.extend([
            (f'cl_{cl.name}', columns.CustomLinkColumn(cl)) for cl in custom_links
@@ -4,10 +4,8 @@ from django.test import override_settings
|
||||
from django.urls import reverse
|
||||
from rest_framework import status
|
||||
|
||||
from core.models import ObjectType
|
||||
from dcim.choices import LocationStatusChoices
|
||||
from dcim.models import Site, Location
|
||||
from users.models import ObjectPermission
|
||||
from utilities.testing import disable_warnings, APITestCase, TestCase
|
||||
|
||||
|
||||
@@ -45,17 +43,28 @@ class GraphQLTestCase(TestCase):
|
||||
|
||||
class GraphQLAPITestCase(APITestCase):
|
||||
|
||||
@classmethod
|
||||
def setUpTestData(cls):
|
||||
sites = (
|
||||
Site(name='Site 1', slug='site-1'),
|
||||
Site(name='Site 2', slug='site-2'),
|
||||
Site(name='Site 3', slug='site-3'),
|
||||
Site(name='Site 4', slug='site-4'),
|
||||
Site(name='Site 5', slug='site-5'),
|
||||
Site(name='Site 6', slug='site-6'),
|
||||
Site(name='Site 7', slug='site-7'),
|
||||
)
|
||||
Site.objects.bulk_create(sites)
|
||||
|
||||
@override_settings(LOGIN_REQUIRED=True)
|
||||
def test_graphql_filter_objects(self):
|
||||
"""
|
||||
Test the operation of filters for GraphQL API requests.
|
||||
"""
|
||||
sites = (
|
||||
Site(name='Site 1', slug='site-1'),
|
||||
Site(name='Site 2', slug='site-2'),
|
||||
Site(name='Site 3', slug='site-3'),
|
||||
)
|
||||
Site.objects.bulk_create(sites)
|
||||
self.add_permissions('dcim.view_site', 'dcim.view_location')
|
||||
url = reverse('graphql')
|
||||
|
||||
sites = Site.objects.all()[:3]
|
||||
Location.objects.create(
|
||||
site=sites[0],
|
||||
name='Location 1',
|
||||
@@ -75,18 +84,6 @@ class GraphQLAPITestCase(APITestCase):
|
||||
status=LocationStatusChoices.STATUS_ACTIVE
|
||||
),
|
||||
|
||||
# Add object-level permission
|
||||
obj_perm = ObjectPermission(
|
||||
name='Test permission',
|
||||
actions=['view']
|
||||
)
|
||||
obj_perm.save()
|
||||
obj_perm.users.add(self.user)
|
||||
obj_perm.object_types.add(ObjectType.objects.get_for_model(Location))
|
||||
obj_perm.object_types.add(ObjectType.objects.get_for_model(Site))
|
||||
|
||||
url = reverse('graphql')
|
||||
|
||||
# A valid request should return the filtered list
|
||||
query = '{location_list(filters: {site_id: "' + str(sites[0].pk) + '"}) {id site {id}}}'
|
||||
response = self.client.post(url, data={'query': query}, format="json", **self.header)
|
||||
@@ -133,10 +130,136 @@ class GraphQLAPITestCase(APITestCase):
|
||||
self.assertEqual(len(data['data']['location_list']), 0)
|
||||
|
||||
# Removing the permissions from location should result in an empty locations list
|
||||
obj_perm.object_types.remove(ObjectType.objects.get_for_model(Location))
|
||||
self.remove_permissions('dcim.view_location')
|
||||
query = '{site(id: ' + str(sites[0].pk) + ') {id locations {id}}}'
|
||||
response = self.client.post(url, data={'query': query}, format="json", **self.header)
|
||||
self.assertHttpStatus(response, status.HTTP_200_OK)
|
||||
data = json.loads(response.content)
|
||||
self.assertNotIn('errors', data)
|
||||
self.assertEqual(len(data['data']['site']['locations']), 0)
|
||||
|
||||
def test_offset_pagination(self):
|
||||
self.add_permissions('dcim.view_site')
|
||||
url = reverse('graphql')
|
||||
|
||||
# Test `limit` only
|
||||
query = """
|
||||
{
|
||||
site_list(pagination: {limit: 3}) {
|
||||
id name
|
||||
}
|
||||
}
|
||||
"""
|
||||
response = self.client.post(url, data={'query': query}, format='json', **self.header)
|
||||
self.assertHttpStatus(response, status.HTTP_200_OK)
|
||||
data = json.loads(response.content)
|
||||
self.assertNotIn('errors', data)
|
||||
self.assertEqual(len(data['data']['site_list']), 3)
|
||||
self.assertEqual(data['data']['site_list'][0]['name'], 'Site 1')
|
||||
self.assertEqual(data['data']['site_list'][1]['name'], 'Site 2')
|
||||
self.assertEqual(data['data']['site_list'][2]['name'], 'Site 3')
|
||||
|
||||
# Test `offset` only
|
||||
query = """
|
||||
{
|
||||
site_list(pagination: {offset: 3}) {
|
||||
id name
|
||||
}
|
||||
}
|
||||
"""
|
||||
response = self.client.post(url, data={'query': query}, format='json', **self.header)
|
||||
self.assertHttpStatus(response, status.HTTP_200_OK)
|
||||
data = json.loads(response.content)
|
||||
self.assertNotIn('errors', data)
|
||||
self.assertEqual(len(data['data']['site_list']), 4)
|
||||
self.assertEqual(data['data']['site_list'][0]['name'], 'Site 4')
|
||||
self.assertEqual(data['data']['site_list'][1]['name'], 'Site 5')
|
||||
self.assertEqual(data['data']['site_list'][2]['name'], 'Site 6')
|
||||
self.assertEqual(data['data']['site_list'][3]['name'], 'Site 7')
|
||||
|
||||
# Test `offset` & `limit`
|
||||
query = """
|
||||
{
|
||||
site_list(pagination: {offset: 3, limit: 3}) {
|
||||
id name
|
||||
}
|
||||
}
|
||||
"""
|
||||
response = self.client.post(url, data={'query': query}, format='json', **self.header)
|
||||
self.assertHttpStatus(response, status.HTTP_200_OK)
|
||||
data = json.loads(response.content)
|
||||
self.assertNotIn('errors', data)
|
||||
self.assertEqual(len(data['data']['site_list']), 3)
|
||||
self.assertEqual(data['data']['site_list'][0]['name'], 'Site 4')
|
||||
self.assertEqual(data['data']['site_list'][1]['name'], 'Site 5')
|
||||
self.assertEqual(data['data']['site_list'][2]['name'], 'Site 6')
|
||||
|
||||
def test_cursor_pagination(self):
|
||||
self.add_permissions('dcim.view_site')
|
||||
url = reverse('graphql')
|
||||
|
||||
# Page 1
|
||||
query = """
|
||||
{
|
||||
site_list(pagination: {start: 0, limit: 3}) {
|
||||
id name
|
||||
}
|
||||
}
|
||||
"""
|
||||
response = self.client.post(url, data={'query': query}, format='json', **self.header)
|
||||
self.assertHttpStatus(response, status.HTTP_200_OK)
|
||||
data = json.loads(response.content)
|
||||
self.assertNotIn('errors', data)
|
||||
self.assertEqual(len(data['data']['site_list']), 3)
|
||||
self.assertEqual(data['data']['site_list'][0]['name'], 'Site 1')
|
||||
self.assertEqual(data['data']['site_list'][1]['name'], 'Site 2')
|
||||
self.assertEqual(data['data']['site_list'][2]['name'], 'Site 3')
|
||||
|
||||
# Page 2
|
||||
start_id = int(data['data']['site_list'][-1]['id']) + 1
|
||||
query = """
|
||||
{
|
||||
site_list(pagination: {start: """ + str(start_id) + """, limit: 3}) {
|
||||
id name
|
||||
}
|
||||
}
|
||||
"""
|
||||
response = self.client.post(url, data={'query': query}, format='json', **self.header)
|
||||
self.assertHttpStatus(response, status.HTTP_200_OK)
|
||||
data = json.loads(response.content)
|
||||
self.assertNotIn('errors', data)
|
||||
self.assertEqual(len(data['data']['site_list']), 3)
|
||||
self.assertEqual(data['data']['site_list'][0]['name'], 'Site 4')
|
||||
self.assertEqual(data['data']['site_list'][1]['name'], 'Site 5')
|
||||
self.assertEqual(data['data']['site_list'][2]['name'], 'Site 6')
|
||||
|
||||
# Page 3
|
||||
start_id = int(data['data']['site_list'][-1]['id']) + 1
|
||||
query = """
|
||||
{
|
||||
site_list(pagination: {start: """ + str(start_id) + """, limit: 3}) {
|
||||
id name
|
||||
}
|
||||
}
|
||||
"""
|
||||
response = self.client.post(url, data={'query': query}, format='json', **self.header)
|
||||
self.assertHttpStatus(response, status.HTTP_200_OK)
|
||||
data = json.loads(response.content)
|
||||
self.assertNotIn('errors', data)
|
||||
self.assertEqual(len(data['data']['site_list']), 1)
|
||||
self.assertEqual(data['data']['site_list'][0]['name'], 'Site 7')
|
||||
|
||||
def test_pagination_conflict(self):
|
||||
url = reverse('graphql')
|
||||
query = """
|
||||
{
|
||||
site_list(pagination: {start: 1, offset: 1}) {
|
||||
id name
|
||||
}
|
||||
}
|
||||
"""
|
||||
response = self.client.post(url, data={'query': query}, format='json', **self.header)
|
||||
self.assertHttpStatus(response, status.HTTP_200_OK)
|
||||
data = json.loads(response.content)
|
||||
self.assertIn('errors', data)
|
||||
self.assertEqual(data['errors'][0]['message'], 'Cannot specify both `start` and `offset` in pagination.')
|
||||
|
||||
@@ -1,18 +1,28 @@
from unittest import skipIf

from django.conf import settings
from django.test import TestCase

from core.models import AutoSyncRecord, DataSource
from dcim.models import Site
from extras.models import CustomLink
from ipam.models import Prefix
from netbox.models.features import get_model_features, has_feature, model_is_public
from netbox.tests.dummy_plugin.models import DummyModel
from taggit.models import Tag


class ModelFeaturesTestCase(TestCase):
    """
    A test case class for verifying model features and utility functions.
    """

    @skipIf('netbox.tests.dummy_plugin' not in settings.PLUGINS, 'dummy_plugin not in settings.PLUGINS')
    def test_model_is_public(self):
        """
        Test that the is_public() utility function returns True for public models only.
        """
        from netbox.tests.dummy_plugin.models import DummyModel

        # Public model
        self.assertFalse(hasattr(DataSource, '_netbox_private'))
        self.assertTrue(model_is_public(DataSource))
@@ -51,3 +61,53 @@ class ModelFeaturesTestCase(TestCase):
        features = get_model_features(CustomLink)
        self.assertIn('cloning', features)
        self.assertNotIn('bookmarks', features)

    def test_cloningmixin_injects_gfk_attribute(self):
        """
        Tests the cloning mixin with GFK attribute injection in the `clone` method.

        This test validates that the `clone` method correctly handles
        and retains the Generic Foreign Key (GFK) attributes on an
        object when the cloning fields are explicitly defined.
        """
        site = Site.objects.create(name='Test Site', slug='test-site')
        prefix = Prefix.objects.create(prefix='10.0.0.0/24', scope=site)

        original_clone_fields = getattr(Prefix, 'clone_fields', None)
        try:
            Prefix.clone_fields = ('scope_type', 'scope_id')
            attrs = prefix.clone()

            self.assertEqual(attrs['scope_type'], prefix.scope_type_id)
            self.assertEqual(attrs['scope_id'], prefix.scope_id)
            self.assertEqual(attrs['scope'], prefix.scope_id)
        finally:
            if original_clone_fields is None:
                delattr(Prefix, 'clone_fields')
            else:
                Prefix.clone_fields = original_clone_fields

    def test_cloningmixin_does_not_inject_gfk_attribute_if_incomplete(self):
        """
        Tests that the cloning mixin does not inject the GFK attribute when the cloning fields are incomplete.

        This test validates that the `clone` method correctly handles
        the case where the cloning fields are incomplete, ensuring that
        the generic foreign key (GFK) attribute is not injected during
        the cloning process.
        """
        site = Site.objects.create(name='Test Site', slug='test-site')
        prefix = Prefix.objects.create(prefix='10.0.0.0/24', scope=site)

        original_clone_fields = getattr(Prefix, 'clone_fields', None)
        try:
            Prefix.clone_fields = ('scope_type',)
            attrs = prefix.clone()

            self.assertIn('scope_type', attrs)
            self.assertNotIn('scope', attrs)
        finally:
            if original_clone_fields is None:
                delattr(Prefix, 'clone_fields')
            else:
                Prefix.clone_fields = original_clone_fields
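As a companion to the two cloning tests above, here is a minimal illustrative sketch (using Django's contenttypes API, not NetBox's actual CloningMixin) of how a clone helper can surface a generic foreign key only when both of its component fields are being cloned; the `clone_attrs` name is hypothetical.

```python
from django.contrib.contenttypes.fields import GenericForeignKey


def clone_attrs(instance, clone_fields):
    """Hypothetical helper: collect attribute values used to prepopulate a clone."""
    attrs = {}
    for name in clone_fields:
        # Prefer the raw database value (e.g. scope_type -> scope_type_id) when one exists
        attrs[name] = getattr(instance, f'{name}_id', getattr(instance, name))
    for field in instance._meta.private_fields:
        if not isinstance(field, GenericForeignKey):
            continue
        if field.ct_field in attrs and field.fk_field in attrs:
            # Both halves of the GFK are cloned, so expose the GFK itself
            # (e.g. 'scope') as the related object's primary key.
            attrs[field.name] = attrs[field.fk_field]
    return attrs
```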
@@ -5,7 +5,6 @@ from copy import deepcopy

from django.contrib import messages
from django.contrib.contenttypes.fields import GenericForeignKey, GenericRel
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import FieldDoesNotExist, ObjectDoesNotExist, ValidationError
from django.db import IntegrityError, router, transaction
from django.db.models import ManyToManyField, ProtectedError, RestrictedError
@@ -484,12 +483,11 @@ class BulkImportView(GetReturnURLMixin, BaseMultiObjectView):
        else:
            instance = self.queryset.model()

            # For newly created objects, apply any default custom field values
            custom_fields = CustomField.objects.filter(
                object_types=ContentType.objects.get_for_model(self.queryset.model),
                ui_editable=CustomFieldUIEditableChoices.YES
            )
            for cf in custom_fields:
            # For newly created objects, apply any default values for custom fields
            for cf in CustomField.objects.get_for_model(self.queryset.model):
                if cf.ui_editable != CustomFieldUIEditableChoices.YES:
                    # Skip custom fields which are not editable via the UI
                    continue
                field_name = f'cf_{cf.name}'
                if field_name not in record:
                    record[field_name] = cf.default
@@ -31,20 +31,20 @@
    "gridstack": "12.4.2",
    "htmx.org": "2.0.8",
    "query-string": "9.3.1",
    "sass": "1.97.2",
    "sass": "1.97.3",
    "tom-select": "2.4.3",
    "typeface-inter": "3.18.1",
    "typeface-roboto-mono": "1.1.13"
  },
  "devDependencies": {
    "@eslint/compat": "^2.0.1",
    "@eslint/compat": "^2.0.2",
    "@eslint/eslintrc": "^3.3.3",
    "@eslint/js": "^9.39.2",
    "@types/bootstrap": "5.2.10",
    "@types/cookie": "^1.0.0",
    "@types/node": "^24.10.1",
    "@typescript-eslint/eslint-plugin": "^8.53.1",
    "@typescript-eslint/parser": "^8.53.1",
    "@typescript-eslint/eslint-plugin": "^8.54.0",
    "@typescript-eslint/parser": "^8.54.0",
    "esbuild": "^0.27.2",
    "esbuild-sass-plugin": "^3.6.0",
    "eslint": "^9.39.2",
@@ -52,8 +52,8 @@
    "eslint-import-resolver-typescript": "^4.4.4",
    "eslint-plugin-import": "^2.32.0",
    "eslint-plugin-prettier": "^5.5.5",
    "globals": "^17.0.0",
    "prettier": "^3.8.0",
    "globals": "^17.3.0",
    "prettier": "^3.8.1",
    "typescript": "^5.9.3"
  },
  "resolutions": {
@@ -173,12 +173,12 @@
  resolved "https://registry.yarnpkg.com/@eslint-community/regexpp/-/regexpp-4.12.2.tgz#bccdf615bcf7b6e8db830ec0b8d21c9a25de597b"
  integrity sha512-EriSTlt5OC9/7SXkRSCAhfSxxoSUgBm33OH+IkwbdpgoqsSsUg7y3uh+IICI/Qg4BBWr3U2i39RpmycbxMq4ew==

"@eslint/compat@^2.0.1":
  version "2.0.1"
  resolved "https://registry.yarnpkg.com/@eslint/compat/-/compat-2.0.1.tgz#5894516f8ce9ba884f4d4ba5ecb6b6459b231144"
  integrity sha512-yl/JsgplclzuvGFNqwNYV4XNPhP3l62ZOP9w/47atNAdmDtIFCx6X7CSk/SlWUuBGkT4Et/5+UD+WyvX2iiIWA==
"@eslint/compat@^2.0.2":
  version "2.0.2"
  resolved "https://registry.yarnpkg.com/@eslint/compat/-/compat-2.0.2.tgz#fc1495688664861870f5e7ee56999dc252b6dd52"
  integrity sha512-pR1DoD0h3HfF675QZx0xsyrsU8q70Z/plx7880NOhS02NuWLgBCOMDL787nUeQ7EWLkxv3bPQJaarjcPQb2Dwg==
  dependencies:
    "@eslint/core" "^1.0.1"
    "@eslint/core" "^1.1.0"

"@eslint/config-array@^0.21.1":
  version "0.21.1"
@@ -203,10 +203,10 @@
  dependencies:
    "@types/json-schema" "^7.0.15"

"@eslint/core@^1.0.1":
  version "1.0.1"
  resolved "https://registry.yarnpkg.com/@eslint/core/-/core-1.0.1.tgz#701ff760cbd279f9490bef0ce54095f4088d4def"
  integrity sha512-r18fEAj9uCk+VjzGt2thsbOmychS+4kxI14spVNibUO2vqKX7obOG+ymZljAwuPZl+S3clPGwCwTDtrdqTiY6Q==
"@eslint/core@^1.1.0":
  version "1.1.0"
  resolved "https://registry.yarnpkg.com/@eslint/core/-/core-1.1.0.tgz#51f5cd970e216fbdae6721ac84491f57f965836d"
  integrity sha512-/nr9K9wkr3P1EzFTdFdMoLuo1PmIxjmwvPozwoSodjNBdefGujXQUF93u1DDZpEaTuDvMsIQddsd35BwtrW9Xw==
  dependencies:
    "@types/json-schema" "^7.0.15"

@@ -935,100 +935,100 @@
  dependencies:
    "@types/estree" "*"

"@typescript-eslint/eslint-plugin@^8.53.1":
  version "8.53.1"
  resolved "https://registry.yarnpkg.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.53.1.tgz#f6640f6f8749b71d9ab457263939e8932a3c6b46"
  integrity sha512-cFYYFZ+oQFi6hUnBTbLRXfTJiaQtYE3t4O692agbBl+2Zy+eqSKWtPjhPXJu1G7j4RLjKgeJPDdq3EqOwmX5Ag==
"@typescript-eslint/eslint-plugin@^8.54.0":
  version "8.54.0"
  resolved "https://registry.yarnpkg.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.54.0.tgz#d8899e5c2eccf5c4a20d01c036a193753748454d"
  integrity sha512-hAAP5io/7csFStuOmR782YmTthKBJ9ND3WVL60hcOjvtGFb+HJxH4O5huAcmcZ9v9G8P+JETiZ/G1B8MALnWZQ==
  dependencies:
    "@eslint-community/regexpp" "^4.12.2"
    "@typescript-eslint/scope-manager" "8.53.1"
    "@typescript-eslint/type-utils" "8.53.1"
    "@typescript-eslint/utils" "8.53.1"
    "@typescript-eslint/visitor-keys" "8.53.1"
    "@typescript-eslint/scope-manager" "8.54.0"
    "@typescript-eslint/type-utils" "8.54.0"
    "@typescript-eslint/utils" "8.54.0"
    "@typescript-eslint/visitor-keys" "8.54.0"
    ignore "^7.0.5"
    natural-compare "^1.4.0"
    ts-api-utils "^2.4.0"

"@typescript-eslint/parser@^8.53.1":
  version "8.53.1"
  resolved "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-8.53.1.tgz#58d4a70cc2daee2becf7d4521d65ea1782d6ec68"
  integrity sha512-nm3cvFN9SqZGXjmw5bZ6cGmvJSyJPn0wU9gHAZZHDnZl2wF9PhHv78Xf06E0MaNk4zLVHL8hb2/c32XvyJOLQg==
"@typescript-eslint/parser@^8.54.0":
  version "8.54.0"
  resolved "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-8.54.0.tgz#3d01a6f54ed247deb9982621f70e7abf1810bd97"
  integrity sha512-BtE0k6cjwjLZoZixN0t5AKP0kSzlGu7FctRXYuPAm//aaiZhmfq1JwdYpYr1brzEspYyFeF+8XF5j2VK6oalrA==
  dependencies:
    "@typescript-eslint/scope-manager" "8.53.1"
    "@typescript-eslint/types" "8.53.1"
    "@typescript-eslint/typescript-estree" "8.53.1"
    "@typescript-eslint/visitor-keys" "8.53.1"
    "@typescript-eslint/scope-manager" "8.54.0"
    "@typescript-eslint/types" "8.54.0"
    "@typescript-eslint/typescript-estree" "8.54.0"
    "@typescript-eslint/visitor-keys" "8.54.0"
    debug "^4.4.3"

"@typescript-eslint/project-service@8.53.1":
  version "8.53.1"
  resolved "https://registry.yarnpkg.com/@typescript-eslint/project-service/-/project-service-8.53.1.tgz#4e47856a0b14a1ceb28b0294b4badef3be1e9734"
  integrity sha512-WYC4FB5Ra0xidsmlPb+1SsnaSKPmS3gsjIARwbEkHkoWloQmuzcfypljaJcR78uyLA1h8sHdWWPHSLDI+MtNog==
"@typescript-eslint/project-service@8.54.0":
  version "8.54.0"
  resolved "https://registry.yarnpkg.com/@typescript-eslint/project-service/-/project-service-8.54.0.tgz#f582aceb3d752544c8e1b11fea8d95d00cf9adc6"
  integrity sha512-YPf+rvJ1s7MyiWM4uTRhE4DvBXrEV+d8oC3P9Y2eT7S+HBS0clybdMIPnhiATi9vZOYDc7OQ1L/i6ga6NFYK/g==
  dependencies:
    "@typescript-eslint/tsconfig-utils" "^8.53.1"
    "@typescript-eslint/types" "^8.53.1"
    "@typescript-eslint/tsconfig-utils" "^8.54.0"
    "@typescript-eslint/types" "^8.54.0"
    debug "^4.4.3"

"@typescript-eslint/scope-manager@8.53.1":
  version "8.53.1"
  resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-8.53.1.tgz#6c4b8c82cd45ae3b365afc2373636e166743a8fa"
  integrity sha512-Lu23yw1uJMFY8cUeq7JlrizAgeQvWugNQzJp8C3x8Eo5Jw5Q2ykMdiiTB9vBVOOUBysMzmRRmUfwFrZuI2C4SQ==
"@typescript-eslint/scope-manager@8.54.0":
  version "8.54.0"
  resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-8.54.0.tgz#307dc8cbd80157e2772c2d36216857415a71ab33"
  integrity sha512-27rYVQku26j/PbHYcVfRPonmOlVI6gihHtXFbTdB5sb6qA0wdAQAbyXFVarQ5t4HRojIz64IV90YtsjQSSGlQg==
  dependencies:
    "@typescript-eslint/types" "8.53.1"
    "@typescript-eslint/visitor-keys" "8.53.1"
    "@typescript-eslint/types" "8.54.0"
    "@typescript-eslint/visitor-keys" "8.54.0"

"@typescript-eslint/tsconfig-utils@8.53.1", "@typescript-eslint/tsconfig-utils@^8.53.1":
  version "8.53.1"
  resolved "https://registry.yarnpkg.com/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.53.1.tgz#efe80b8d019cd49e5a1cf46c2eb0cd2733076424"
  integrity sha512-qfvLXS6F6b1y43pnf0pPbXJ+YoXIC7HKg0UGZ27uMIemKMKA6XH2DTxsEDdpdN29D+vHV07x/pnlPNVLhdhWiA==
"@typescript-eslint/tsconfig-utils@8.54.0", "@typescript-eslint/tsconfig-utils@^8.54.0":
  version "8.54.0"
  resolved "https://registry.yarnpkg.com/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.54.0.tgz#71dd7ba1674bd48b172fc4c85b2f734b0eae3dbc"
  integrity sha512-dRgOyT2hPk/JwxNMZDsIXDgyl9axdJI3ogZ2XWhBPsnZUv+hPesa5iuhdYt2gzwA9t8RE5ytOJ6xB0moV0Ujvw==

"@typescript-eslint/type-utils@8.53.1":
  version "8.53.1"
  resolved "https://registry.yarnpkg.com/@typescript-eslint/type-utils/-/type-utils-8.53.1.tgz#95de2651a96d580bf5c6c6089ddd694284d558ad"
  integrity sha512-MOrdtNvyhy0rHyv0ENzub1d4wQYKb2NmIqG7qEqPWFW7Mpy2jzFC3pQ2yKDvirZB7jypm5uGjF2Qqs6OIqu47w==
"@typescript-eslint/type-utils@8.54.0":
  version "8.54.0"
  resolved "https://registry.yarnpkg.com/@typescript-eslint/type-utils/-/type-utils-8.54.0.tgz#64965317dd4118346c2fa5ee94492892200e9fb9"
  integrity sha512-hiLguxJWHjjwL6xMBwD903ciAwd7DmK30Y9Axs/etOkftC3ZNN9K44IuRD/EB08amu+Zw6W37x9RecLkOo3pMA==
  dependencies:
    "@typescript-eslint/types" "8.53.1"
    "@typescript-eslint/typescript-estree" "8.53.1"
    "@typescript-eslint/utils" "8.53.1"
    "@typescript-eslint/types" "8.54.0"
    "@typescript-eslint/typescript-estree" "8.54.0"
    "@typescript-eslint/utils" "8.54.0"
    debug "^4.4.3"
    ts-api-utils "^2.4.0"

"@typescript-eslint/types@8.53.1", "@typescript-eslint/types@^8.53.1":
  version "8.53.1"
  resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-8.53.1.tgz#101f203f0807a63216cceceedb815fabe21d5793"
  integrity sha512-jr/swrr2aRmUAUjW5/zQHbMaui//vQlsZcJKijZf3M26bnmLj8LyZUpj8/Rd6uzaek06OWsqdofN/Thenm5O8A==
"@typescript-eslint/types@8.54.0", "@typescript-eslint/types@^8.54.0":
  version "8.54.0"
  resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-8.54.0.tgz#c12d41f67a2e15a8a96fbc5f2d07b17331130889"
  integrity sha512-PDUI9R1BVjqu7AUDsRBbKMtwmjWcn4J3le+5LpcFgWULN3LvHC5rkc9gCVxbrsrGmO1jfPybN5s6h4Jy+OnkAA==

"@typescript-eslint/typescript-estree@8.53.1":
  version "8.53.1"
  resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-8.53.1.tgz#b6dce2303c9e27e95b8dcd8c325868fff53e488f"
  integrity sha512-RGlVipGhQAG4GxV1s34O91cxQ/vWiHJTDHbXRr0li2q/BGg3RR/7NM8QDWgkEgrwQYCvmJV9ichIwyoKCQ+DTg==
"@typescript-eslint/typescript-estree@8.54.0":
  version "8.54.0"
  resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-8.54.0.tgz#3c7716905b2b811fadbd2114804047d1bfc86527"
  integrity sha512-BUwcskRaPvTk6fzVWgDPdUndLjB87KYDrN5EYGetnktoeAvPtO4ONHlAZDnj5VFnUANg0Sjm7j4usBlnoVMHwA==
  dependencies:
    "@typescript-eslint/project-service" "8.53.1"
    "@typescript-eslint/tsconfig-utils" "8.53.1"
    "@typescript-eslint/types" "8.53.1"
    "@typescript-eslint/visitor-keys" "8.53.1"
    "@typescript-eslint/project-service" "8.54.0"
    "@typescript-eslint/tsconfig-utils" "8.54.0"
    "@typescript-eslint/types" "8.54.0"
    "@typescript-eslint/visitor-keys" "8.54.0"
    debug "^4.4.3"
    minimatch "^9.0.5"
    semver "^7.7.3"
    tinyglobby "^0.2.15"
    ts-api-utils "^2.4.0"

"@typescript-eslint/utils@8.53.1":
  version "8.53.1"
  resolved "https://registry.yarnpkg.com/@typescript-eslint/utils/-/utils-8.53.1.tgz#81fe6c343de288701b774f4d078382f567e6edaa"
  integrity sha512-c4bMvGVWW4hv6JmDUEG7fSYlWOl3II2I4ylt0NM+seinYQlZMQIaKaXIIVJWt9Ofh6whrpM+EdDQXKXjNovvrg==
"@typescript-eslint/utils@8.54.0":
  version "8.54.0"
  resolved "https://registry.yarnpkg.com/@typescript-eslint/utils/-/utils-8.54.0.tgz#c79a4bcbeebb4f571278c0183ed1cb601d84c6c8"
  integrity sha512-9Cnda8GS57AQakvRyG0PTejJNlA2xhvyNtEVIMlDWOOeEyBkYWhGPnfrIAnqxLMTSTo6q8g12XVjjev5l1NvMA==
  dependencies:
    "@eslint-community/eslint-utils" "^4.9.1"
    "@typescript-eslint/scope-manager" "8.53.1"
    "@typescript-eslint/types" "8.53.1"
    "@typescript-eslint/typescript-estree" "8.53.1"
    "@typescript-eslint/scope-manager" "8.54.0"
    "@typescript-eslint/types" "8.54.0"
    "@typescript-eslint/typescript-estree" "8.54.0"

"@typescript-eslint/visitor-keys@8.53.1":
  version "8.53.1"
  resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-8.53.1.tgz#405f04959be22b9be364939af8ac19c3649b6eb7"
  integrity sha512-oy+wV7xDKFPRyNggmXuZQSBzvoLnpmJs+GhzRhPjrxl2b/jIlyjVokzm47CZCDUdXKr2zd7ZLodPfOBpOPyPlg==
"@typescript-eslint/visitor-keys@8.54.0":
  version "8.54.0"
  resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-8.54.0.tgz#0e4b50124b210b8600b245dd66cbad52deb15590"
  integrity sha512-VFlhGSl4opC0bprJiItPQ1RfUhGDIBokcPwaFH4yiBCaNPeld/9VeXbiPO1cLyorQi1G1vL+ecBk1x8o1axORA==
  dependencies:
    "@typescript-eslint/types" "8.53.1"
    "@typescript-eslint/types" "8.54.0"
    eslint-visitor-keys "^4.2.1"

"@unrs/resolver-binding-android-arm-eabi@1.11.1":
@@ -2184,10 +2184,10 @@ globals@^14.0.0:
  resolved "https://registry.npmjs.org/globals/-/globals-14.0.0.tgz"
  integrity sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ==

globals@^17.0.0:
  version "17.0.0"
  resolved "https://registry.yarnpkg.com/globals/-/globals-17.0.0.tgz#a4196d9cfeb4d627ba165b4647b1f5853bf90a30"
  integrity sha512-gv5BeD2EssA793rlFWVPMMCqefTlpusw6/2TbAVMy0FzcG8wKJn4O+NqJ4+XWmmwrayJgw5TzrmWjFgmz1XPqw==
globals@^17.3.0:
  version "17.3.0"
  resolved "https://registry.yarnpkg.com/globals/-/globals-17.3.0.tgz#8b96544c2fa91afada02747cc9731c002a96f3b9"
  integrity sha512-yMqGUQVVCkD4tqjOJf3TnrvaaHDMYp4VlUSObbkIiuCPe/ofdMBFIAcBbCSRFWOnos6qRiTVStDwqPLUclaxIw==

globalthis@^1.0.3, globalthis@^1.0.4:
  version "1.0.4"
@@ -2985,10 +2985,10 @@ prettier-linter-helpers@^1.0.1:
  dependencies:
    fast-diff "^1.1.2"

prettier@^3.8.0:
  version "3.8.0"
  resolved "https://registry.yarnpkg.com/prettier/-/prettier-3.8.0.tgz#f72cf71505133f40cfa2ef77a2668cdc558fcd69"
  integrity sha512-yEPsovQfpxYfgWNhCfECjG5AQaO+K3dp6XERmOepyPDVqcJm+bjyCVO3pmU+nAPe0N5dDvekfGezt/EIiRe1TA==
prettier@^3.8.1:
  version "3.8.1"
  resolved "https://registry.yarnpkg.com/prettier/-/prettier-3.8.1.tgz#edf48977cf991558f4fcbd8a3ba6015ba2a3a173"
  integrity sha512-UOnG6LftzbdaHZcKoPFtOcCKztrQ57WkHDeRD9t/PTQtmT0NHSeWWepj6pS0z/N7+08BHFDQVUrfmfMRcZwbMg==

punycode.js@^2.3.1:
  version "2.3.1"
@@ -3172,7 +3172,18 @@ safe-regex-test@^1.1.0:
    es-errors "^1.3.0"
    is-regex "^1.2.1"

sass@1.97.2, sass@^1.97.2:
sass@1.97.3:
  version "1.97.3"
  resolved "https://registry.yarnpkg.com/sass/-/sass-1.97.3.tgz#9cb59339514fa7e2aec592b9700953ac6e331ab2"
  integrity sha512-fDz1zJpd5GycprAbu4Q2PV/RprsRtKC/0z82z0JLgdytmcq0+ujJbJ/09bPGDxCLkKY3Np5cRAOcWiVkLXJURg==
  dependencies:
    chokidar "^4.0.0"
    immutable "^5.0.2"
    source-map-js ">=0.6.2 <2.0.0"
  optionalDependencies:
    "@parcel/watcher" "^2.4.1"

sass@^1.97.2:
  version "1.97.2"
  resolved "https://registry.yarnpkg.com/sass/-/sass-1.97.2.tgz#e515a319092fd2c3b015228e3094b40198bff0da"
  integrity sha512-y5LWb0IlbO4e97Zr7c3mlpabcbBtS+ieiZ9iwDooShpFKWXf62zz5pEPdwrLYm+Bxn1fnbwFGzHuCLSA9tBmrw==
@@ -3441,7 +3452,7 @@ toggle-selection@^1.0.6:

tom-select@2.4.3:
  version "2.4.3"
  resolved "https://registry.npmjs.org/tom-select/-/tom-select-2.4.3.tgz"
  resolved "https://registry.yarnpkg.com/tom-select/-/tom-select-2.4.3.tgz#1daa4131cd317de691f39eb5bf41148265986c1f"
  integrity sha512-MFFrMxP1bpnAMPbdvPCZk0KwYxLqhYZso39torcdoefeV/NThNyDu8dV96/INJ5XQVTL3O55+GqQ78Pkj5oCfw==
  dependencies:
    "@orchidjs/sifter" "^1.1.0"
@@ -1,3 +1,3 @@
version: "4.5.1"
version: "4.5.2"
edition: "Community"
published: "2026-01-20"
published: "2026-02-03"
@@ -53,7 +53,7 @@ Blocks:
{% nav %}

{# Release info #}
<div class="text-muted text-center fs-5 my-3">
<div class="text-muted text-center fs-5 my-3 px-3">
  {{ settings.RELEASE.name }}
  {% if not settings.RELEASE.features.commercial and not settings.ISOLATED_DEPLOYMENT %}
    <div>
Multiple binary files not shown; multiple oversized file diffs suppressed.
@@ -3,6 +3,7 @@ import enum
from django.conf import settings
from django.utils.translation import gettext_lazy as _

from utilities.data import get_config_value_ci
from utilities.string import enum_key

__all__ = (
@@ -24,13 +25,14 @@ class ChoiceSetMeta(type):
        ).format(name=name)
        app = attrs['__module__'].split('.', 1)[0]
        replace_key = f'{app}.{key}'
        extend_key = f'{replace_key}+' if replace_key else None
        if replace_key and replace_key in settings.FIELD_CHOICES:
            # Replace the stock choices
            attrs['CHOICES'] = settings.FIELD_CHOICES[replace_key]
        elif extend_key and extend_key in settings.FIELD_CHOICES:
            # Extend the stock choices
            attrs['CHOICES'].extend(settings.FIELD_CHOICES[extend_key])
        replace_choices = get_config_value_ci(settings.FIELD_CHOICES, replace_key)
        if replace_choices is not None:
            attrs['CHOICES'] = replace_choices
        else:
            extend_key = f'{replace_key}+'
            extend_choices = get_config_value_ci(settings.FIELD_CHOICES, extend_key)
            if extend_choices is not None:
                attrs['CHOICES'].extend(extend_choices)

        # Define choice tuples and color maps
        attrs['_choices'] = []
@@ -10,6 +10,7 @@
    'deepmerge',
    'drange',
    'flatten_dict',
    'get_config_value_ci',
    'ranges_to_string',
    'ranges_to_string_list',
    'resolve_attr_path',
@@ -22,6 +23,19 @@
# Dictionary utilities
#

def get_config_value_ci(config_dict, key, default=None):
    """
    Retrieve a value from a dictionary using case-insensitive key matching.
    """
    if key in config_dict:
        return config_dict[key]
    key_lower = key.lower()
    for config_key, value in config_dict.items():
        if config_key.lower() == key_lower:
            return value
    return default


def deepmerge(original, new):
    """
    Deep merge two dictionaries (new into original) and return a new dict
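A quick usage sketch of the helper added above; the dictionary and its keys are illustrative only.

```python
from utilities.data import get_config_value_ci

config = {'dcim.Site': 'replace', 'ipam.IPAddress+': 'extend'}

# Exact and case-insensitive lookups resolve to the same entry
assert get_config_value_ci(config, 'dcim.Site') == 'replace'
assert get_config_value_ci(config, 'dcim.site') == 'replace'
assert get_config_value_ci(config, 'ipam.ipaddress+') == 'extend'

# Missing keys fall back to the supplied default
assert get_config_value_ci(config, 'dcim.Device', default=[]) == []
```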
@@ -1,4 +1,4 @@
from django.test import TestCase
from django.test import TestCase, override_settings

from utilities.choices import ChoiceSet

@@ -30,3 +30,29 @@ class ChoiceSetTestCase(TestCase):

    def test_values(self):
        self.assertListEqual(ExampleChoices.values(), ['a', 'b', 'c', 1, 2, 3])


class FieldChoicesCaseInsensitiveTestCase(TestCase):
    """
    Integration tests for FIELD_CHOICES case-insensitive key lookup.
    """

    def test_replace_choices_with_different_casing(self):
        """Test that replacement works when config key casing differs."""
        # Config uses lowercase, but code constructs PascalCase key
        with override_settings(FIELD_CHOICES={'utilities.teststatus': [('new', 'New')]}):
            class TestStatusChoices(ChoiceSet):
                key = 'TestStatus'  # Code will look up 'utilities.TestStatus'
                CHOICES = [('old', 'Old')]

            self.assertEqual(TestStatusChoices.CHOICES, [('new', 'New')])

    def test_extend_choices_with_different_casing(self):
        """Test that extension works with the + suffix under casing differences."""
        # Config uses lowercase with + suffix
        with override_settings(FIELD_CHOICES={'utilities.teststatus+': [('extra', 'Extra')]}):
            class TestStatusChoices(ChoiceSet):
                key = 'TestStatus'  # Code will look up 'utilities.TestStatus+'
                CHOICES = [('base', 'Base')]

            self.assertEqual(TestStatusChoices.CHOICES, [('base', 'Base'), ('extra', 'Extra')])
@@ -2,6 +2,7 @@ from django.db.backends.postgresql.psycopg_any import NumericRange
from django.test import TestCase
from utilities.data import (
    check_ranges_overlap,
    get_config_value_ci,
    ranges_to_string,
    ranges_to_string_list,
    string_to_ranges,
@@ -96,3 +97,25 @@ class RangeFunctionsTestCase(TestCase):
            string_to_ranges('2-10, a-b'),
            None  # Fails to convert
        )


class GetConfigValueCITestCase(TestCase):

    def test_exact_match(self):
        config = {'dcim.site': 'value1', 'dcim.Device': 'value2'}
        self.assertEqual(get_config_value_ci(config, 'dcim.site'), 'value1')
        self.assertEqual(get_config_value_ci(config, 'dcim.Device'), 'value2')

    def test_case_insensitive_match(self):
        config = {'dcim.Site': 'value1', 'ipam.IPAddress': 'value2'}
        self.assertEqual(get_config_value_ci(config, 'dcim.site'), 'value1')
        self.assertEqual(get_config_value_ci(config, 'ipam.ipaddress'), 'value2')

    def test_default_value(self):
        config = {'dcim.site': 'value1'}
        self.assertIsNone(get_config_value_ci(config, 'nonexistent'))
        self.assertEqual(get_config_value_ci(config, 'nonexistent', default=[]), [])

    def test_empty_dict(self):
        self.assertIsNone(get_config_value_ci({}, 'any.key'))
        self.assertEqual(get_config_value_ci({}, 'any.key', default=[]), [])
@@ -3,7 +3,7 @@

[project]
name = "netbox"
version = "4.5.1"
version = "4.5.2"
requires-python = ">=3.12"
description = "The premier source of truth powering network automation."
readme = "README.md"
@@ -19,12 +19,12 @@ djangorestframework==3.16.1
drf-spectacular==0.29.0
drf-spectacular-sidecar==2026.1.1
feedparser==6.0.12
gunicorn==23.0.0
gunicorn==24.1.1
Jinja2==3.1.6
jsonschema==4.26.0
Markdown==3.10
Markdown==3.10.1
mkdocs-material==9.7.1
mkdocstrings==1.0.1
mkdocstrings==1.0.2
mkdocstrings-python==2.0.1
netaddr==1.3.0
nh3==0.3.2
@@ -35,9 +35,9 @@ requests==2.32.5
rq==2.6.1
social-auth-app-django==5.7.0
social-auth-core==4.8.3
sorl-thumbnail==12.11.0
strawberry-graphql==0.289.2
strawberry-graphql-django==0.74.1
sorl-thumbnail==13.0.0
strawberry-graphql==0.291.0
strawberry-graphql-django==0.75.0
svgwrite==1.4.3
tablib==3.9.0
tzdata==2025.3