Mirror of https://github.com/netbox-community/netbox.git (synced 2026-02-15 05:17:42 +01:00)

Compare commits: 128 commits, v4.4.4 ... 20068-impo
Commits in this comparison (SHA1):

ebf8f7fa1b, 922b08c0ff, 84864fa5e1, 767dfccd8f, dc4bab7477, 60aa952eb1, 8b3f7ce507, adad3745ae,
8055fae253, aac3a51431, 3e0ad2176f, 4e8edfb3d6, 651557a82b, c3d66dc42e, a50e570f22, a44a79ec79,
b919868521, d9aab6bbe2, 82171fce7a, 020eb64eab, ec7afccd55, 76fd63823c, 6c373decd6, 222b26e060,
066b787777, 90b2732068, bfba0ccaae, d5718357f1, d61737396b, c6248f1142, 05f254a768, 0cb10f806a,
8ac7f6f8de, cd8087ab43, da5ae21150, fbb948d30e, 975e0ff398, d7877b7627, b685df7c9c, 9dcf9475cc,
e1bf27e4db, 9b89af75e4, 9e13d89baa, 4961b0d334, ab06edd9f5, e787a71c1d, cd8878df30, b5a9cb1762,
9723a2f0ad, 327d08f4c2, 4be476eb49, 8005b56ab4, 3f1654c9ba, 95f8fe788d, 588c069ff1, 5b3ff3c0e9,
730d73042d, 6c2a6d0e90, e6a6ff7aec, 87ff83ef1f, 3cdc6251be, 0e1705b870, 8522c03b71, 20af97ce24,
264b40a269, cbf9b62f12, c429cc3638, 032ed4f11c, 7ca4342c15, 70bc1c226a, 6a21459ccc, 635de4af2e,
df96f7dd0f, 90712fa865, fbe76ac98a, 0b61d69e05, 1245a9f99d, 78223cea03, 8452222761, 8a59fc733c,
df688ce064, 1a1ab2a19d, 80f03daad6, d04c41d0f6, 1fc849eb40, bbf1f6181d, 729b0365e0, 43cb476223,
d6f756d315, afc62b6ffd, 3d4841f17f, 2aefb3af73, 4eff4d6a4a, 9381564cab, 3d143d635b, 77307b3c91,
aa4571b61f, 56d9146323, e192f64dd2, d433a28524, dbfdf318ad, 639bc4462b, 1c59d411f7, ac7a4ec4a3,
0cf58e62b2, fb8d41b527, ae5d7911f9, 3bd0186870, 09ce8a808d, 8eaff9dce7, cb3308a166, 5fbae8407e,
2fdd46f64c, c5124cb2e4, d01d7b4156, 4db6123fb2, 43648d629b, 0b97df0984, 5334c8143c, bbb330becf,
e4c74ce6a3, a4868f894d, 531ea34207, 6747c82a1a, e251ea10b5, a1aaf465ac, 2a1d315d85, 8cc6589a35
@@ -2,7 +2,7 @@
 name: ✨ Feature Request
 type: Feature
 description: Propose a new NetBox feature or enhancement
-labels: ["type: feature", "status: needs triage"]
+labels: ["netbox", "type: feature", "status: needs triage"]
 body:
   - type: markdown
     attributes:
@@ -15,7 +15,7 @@ body:
     attributes:
       label: NetBox version
       description: What version of NetBox are you currently running?
-      placeholder: v4.4.4
+      placeholder: v4.4.7
     validations:
       required: true
   - type: dropdown
4 .github/ISSUE_TEMPLATE/02-bug_report.yaml vendored
@@ -2,7 +2,7 @@
 name: 🐛 Bug Report
 type: Bug
 description: Report a reproducible bug in the current release of NetBox
-labels: ["type: bug", "status: needs triage"]
+labels: ["netbox", "type: bug", "status: needs triage"]
 body:
   - type: markdown
     attributes:
@@ -27,7 +27,7 @@ body:
     attributes:
       label: NetBox Version
       description: What version of NetBox are you currently running?
-      placeholder: v4.4.4
+      placeholder: v4.4.7
     validations:
       required: true
   - type: dropdown
@@ -2,7 +2,7 @@
 name: 📖 Documentation Change
 type: Documentation
 description: Suggest an addition or modification to the NetBox documentation
-labels: ["type: documentation", "status: needs triage"]
+labels: ["netbox", "type: documentation", "status: needs triage"]
 body:
   - type: dropdown
     attributes:
2 .github/ISSUE_TEMPLATE/04-translation.yaml vendored
@@ -2,7 +2,7 @@
 name: 🌍 Translation
 type: Translation
 description: Request support for a new language in the user interface
-labels: ["type: translation"]
+labels: ["netbox", "type: translation"]
 body:
   - type: markdown
     attributes:
2 .github/ISSUE_TEMPLATE/05-housekeeping.yaml vendored
@@ -2,7 +2,7 @@
 name: 🏡 Housekeeping
 type: Housekeeping
 description: A change pertaining to the codebase itself (developers only)
-labels: ["type: housekeeping"]
+labels: ["netbox", "type: housekeeping"]
 body:
   - type: markdown
     attributes:
2 .github/ISSUE_TEMPLATE/06-deprecation.yaml vendored
@@ -2,7 +2,7 @@
 name: 🗑️ Deprecation
 type: Deprecation
 description: The removal of an existing feature or resource
-labels: ["type: deprecation"]
+labels: ["netbox", "type: deprecation"]
 body:
   - type: textarea
     attributes:
@@ -1,6 +1,6 @@
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.6.9
+    rev: v0.14.1
    hooks:
      - id: ruff
        name: "Ruff linter"
@@ -21,14 +21,6 @@ repos:
        language: system
        pass_filenames: false
        types: [python]
-      - id: openapi-check
-        name: "Validate OpenAPI schema"
-        description: "Check for any unexpected changes to the OpenAPI schema"
-        files: api/.*\.py$
-        entry: scripts/verify-openapi.sh
-        language: system
-        pass_filenames: false
-        types: [python]
      - id: mkdocs-build
        name: "Build documentation"
        description: "Build the documentation with mkdocs"
@@ -186,6 +186,7 @@
 "usb-3-micro-b",
 "molex-micro-fit-1x2",
 "molex-micro-fit-2x2",
+"molex-micro-fit-2x3",
 "molex-micro-fit-2x4",
 "dc-terminal",
 "saf-d-grid",
@@ -293,6 +294,7 @@
 "usb-c",
 "molex-micro-fit-1x2",
 "molex-micro-fit-2x2",
+"molex-micro-fit-2x3",
 "molex-micro-fit-2x4",
 "dc-terminal",
 "eaton-c39",
6521 contrib/openapi.json
File diff suppressed because one or more lines are too long
@@ -35,6 +35,7 @@ Some configuration parameters are primarily controlled via NetBox's admin interf
* [`POWERFEED_DEFAULT_MAX_UTILIZATION`](./default-values.md#powerfeed_default_max_utilization)
* [`POWERFEED_DEFAULT_VOLTAGE`](./default-values.md#powerfeed_default_voltage)
* [`PREFER_IPV4`](./miscellaneous.md#prefer_ipv4)
* [`PROTECTION_RULES`](./data-validation.md#protection_rules)
* [`RACK_ELEVATION_DEFAULT_UNIT_HEIGHT`](./default-values.md#rack_elevation_default_unit_height)
* [`RACK_ELEVATION_DEFAULT_UNIT_WIDTH`](./default-values.md#rack_elevation_default_unit_width)
@@ -53,6 +53,16 @@ Sets content for the top banner in the user interface.

---

## COPILOT_ENABLED

!!! tip "Dynamic Configuration Parameter"

Default: `True`

Enables or disables the [NetBox Copilot](https://netboxlabs.com/docs/copilot/) agent globally. When enabled, users can opt to toggle the agent individually.

---

## CENSUS_REPORTING_ENABLED

Default: `True`
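`COPILOT_ENABLED` is described above as a dynamic configuration parameter, so it is normally toggled from the admin UI. As a minimal sketch only, assuming the usual NetBox pattern of overriding such parameters in `configuration.py`, a hard-coded value would look like:

```python
# configuration.py (sketch): hard-code the Copilot toggle instead of managing it
# dynamically. The parameter name comes from the docs above; placing it here is
# an assumption based on how other NetBox settings are overridden.
COPILOT_ENABLED = False  # disable the NetBox Copilot agent for all users
```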
@@ -92,7 +92,7 @@ If `True`, the cookie employed for cross-site request forgery (CSRF) protection

Default: `[]`

-Defines a list of trusted origins for unsafe (e.g. `POST`) requests. This is a pass-through to Django's [`CSRF_TRUSTED_ORIGINS`](https://docs.djangoproject.com/en/stable/ref/settings/#csrf-trusted-origins) setting. Note that each host listed must specify a scheme (e.g. `http://` or `https://).
+Defines a list of trusted origins for unsafe (e.g. `POST`) requests. This is a pass-through to Django's [`CSRF_TRUSTED_ORIGINS`](https://docs.djangoproject.com/en/stable/ref/settings/#csrf-trusted-origins) setting. Note that each host listed must specify a scheme (e.g. `http://` or `https://`).

```python
CSRF_TRUSTED_ORIGINS = (
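The example above is cut off by the diff context; a minimal sketch of the full setting, with illustrative hostnames, would be:

```python
# configuration.py (sketch): each trusted origin must include its scheme.
CSRF_TRUSTED_ORIGINS = (
    "https://netbox.example.com",
    "http://netbox.internal.example.com:8000",
)
```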
@@ -232,6 +232,9 @@ STORAGES = {
     },
     "scripts": {
         "BACKEND": "extras.storage.ScriptFileSystemStorage",
+        "OPTIONS": {
+            "allow_overwrite": True,
+        },
     },
 }
 ```
@@ -247,6 +250,7 @@ STORAGES = {
         "OPTIONS": {
             'access_key': 'access key',
             'secret_key': 'secret key',
+            "allow_overwrite": True,
         }
     },
 }
@@ -95,7 +95,7 @@ An example fieldset definition is provided below:

```python
class MyScript(Script):
-    class Meta:
+    class Meta(Script.Meta):
         fieldsets = (
             ('First group', ('field1', 'field2', 'field3')),
             ('Second group', ('field4', 'field5')),
@@ -404,6 +404,61 @@ A complete date & time. Returns a `datetime.datetime` object.

Custom scripts can be run via the web UI by navigating to the script, completing any required form data, and clicking the "run script" button. It is possible to schedule a script to be executed at a specified time in the future. A scheduled script can be canceled by deleting the associated job result object.

#### Prefilling variables via URL parameters

Script form fields can be prefilled by appending query parameters to the script URL. Each parameter name must match the variable name defined on the script class. Prefilled values are treated as initial values and can be edited before execution. Multiple values can be supplied by repeating the same parameter. Query values must be percent‑encoded where required (for example, spaces as `%20`).

Examples:

For string and integer variables, when a script defines:

```python
from extras.scripts import Script, StringVar, IntegerVar

class MyScript(Script):
    name = StringVar()
    count = IntegerVar()
```

the following URL prefills the `name` and `count` fields:

```
https://<netbox>/extras/scripts/<script_id>/?name=Branch42&count=3
```

For object variables (`ObjectVar`), supply the object’s primary key (PK):

```
https://<netbox>/extras/scripts/<script_id>/?device=1
```

If an object ID cannot be resolved or the object is not visible to the requesting user, the field remains unpopulated.

Supported variable types:

| Variable class | Expected input | Example query string |
|---|---|---|
| `StringVar` | string (percent‑encoded) | `?name=Branch42` |
| `TextVar` | string (percent‑encoded) | `?notes=Initial%20value` |
| `IntegerVar` | integer | `?count=3` |
| `DecimalVar` | decimal number | `?ratio=0.75` |
| `BooleanVar` | value → `True`; empty → `False` | `?enabled=true` (True), `?enabled=` (False) |
| `ChoiceVar` | choice value (not label) | `?role=edge` |
| `MultiChoiceVar` | choice values (repeat) | `?roles=edge&roles=core` |
| `ObjectVar(Device)` | PK (integer) | `?device=1` |
| `MultiObjectVar(Device)` | PKs (repeat) | `?devices=1&devices=2` |
| `IPAddressVar` | IP address | `?ip=198.51.100.10` |
| `IPAddressWithMaskVar` | IP address with mask | `?addr=192.0.2.1/24` |
| `IPNetworkVar` | IP network prefix | `?network=2001:db8::/64` |
| `DateVar` | date `YYYY-MM-DD` | `?date=2025-01-05` |
| `DateTimeVar` | ISO datetime | `?when=2025-01-05T14:30:00` |
| `FileVar` | — (not supported) | — |

!!! note
    - The parameter names above are examples; use the actual variable attribute names defined by the script.
    - For `BooleanVar`, only an empty value (`?enabled=`) unchecks the box; any other value, including `false` or `0`, checks it.
    - File uploads (`FileVar`) cannot be prefilled via URL parameters.

### Via the API

To run a script via the REST API, issue a POST request to the script's endpoint specifying the form data and commitment. For example, to run a script named `example.MyReport`, we would make a request such as the following:
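The request body itself is cut off by the diff context above; a hedged sketch of such a call follows. The hostname, token, and form values are placeholders, and the `data`/`commit` keys follow the pattern described in the paragraph.

```python
# Sketch only: run a custom script through the REST API. "data" carries the
# script's variables; "commit" controls whether changes are actually applied.
import requests

url = "https://netbox.example.com/api/extras/scripts/example.MyReport/"
headers = {"Authorization": "Token 0123456789abcdef0123456789abcdef01234567"}
payload = {
    "data": {"name": "Branch42", "count": 3},
    "commit": True,
}
response = requests.post(url, json=payload, headers=headers, timeout=30)
response.raise_for_status()
print(response.json())  # the response describes the queued job
```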
@@ -455,7 +510,7 @@ from extras.scripts import *

class NewBranchScript(Script):

-    class Meta:
+    class Meta(Script.Meta):
         name = "New Branch"
         description = "Provision a new branch site"
         field_order = ['site_name', 'switch_count', 'switch_model']
@@ -12,7 +12,7 @@ Depending on its classification, each NetBox model may support various features

| Feature | Feature Mixin | Registry Key | Description |
|---------|---------------|--------------|-------------|
-| [Bookmarks](../features/customization.md#bookmarks) | `BookmarksMixin` | `bookmarks` | These models can be bookmarked natively in the user interface |
+| [Bookmarks](../features/user-preferences.md#bookmarks) | `BookmarksMixin` | `bookmarks` | These models can be bookmarked natively in the user interface |
| [Change logging](../features/change-logging.md) | `ChangeLoggingMixin` | `change_logging` | Changes to these objects are automatically recorded in the change log |
| Cloning | `CloningMixin` | `cloning` | Provides the `clone()` method to prepare a copy |
| [Contacts](../features/contacts.md) | `ContactsMixin` | `contacts` | Contacts can be associated with these models |
@@ -6,10 +6,14 @@ For end‑user guidance on resetting saved table layouts, see [Features > User P

## Available Preferences

-| Name | Description |
-|--------------------------|---------------------------------------------------------------|
-| data_format | Preferred format when rendering raw data (JSON or YAML) |
-| pagination.per_page | The number of items to display per page of a paginated table |
-| pagination.placement | Where to display the paginator controls relative to the table |
-| tables.${table}.columns | The ordered list of columns to display when viewing the table |
-| tables.${table}.ordering | A list of column names by which the table should be ordered |
+| Name | Description |
+|----------------------------|---------------------------------------------------------------|
+| `csv_delimiter` | The delimiting character used when exporting CSV data |
+| `data_format` | Preferred format when rendering raw data (JSON or YAML) |
+| `locale.language` | The language selected for UI translation |
+| `pagination.per_page` | The number of items to display per page of a paginated table |
+| `pagination.placement` | Where to display the paginator controls relative to the table |
+| `tables.${table}.columns` | The ordered list of columns to display when viewing the table |
+| `tables.${table}.ordering` | A list of column names by which the table should be ordered |
+| `ui.copilot_enabled` | Toggles the NetBox Copilot AI agent |
+| `ui.tables.striping` | Toggles visual striping of tables in the UI |
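Site-wide defaults for these preferences can be seeded through the `DEFAULT_USER_PREFERENCES` configuration parameter (referenced later in this compare by the config revision form). A sketch follows; the nesting of the dotted names into dictionaries and the particular values are assumptions for illustration.

```python
# configuration.py (sketch): dotted preference names map onto nested keys.
# Keys mirror the table above; the values shown are illustrative defaults.
DEFAULT_USER_PREFERENCES = {
    "data_format": "yaml",
    "pagination": {"per_page": 100, "placement": "bottom"},
    "ui": {"tables": {"striping": True}},
}
```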
@@ -60,6 +60,13 @@ Four of the standard Python logging levels are supported:

Log entries recorded using the runner's logger will be saved in the job's log in the database in addition to being processed by other [system logging handlers](../../configuration/system.md#logging).

### Jobs running for Model instances

A Job can be executed for a specific instance of a Model.
To enable this functionality, the model must include the `JobsMixin`.

When enqueuing a Job, you can associate it with a particular instance by passing that instance to the `instance` parameter.

### Scheduled Jobs

As described above, jobs can be scheduled for immediate execution or at any later time using the `enqueue()` method. However, for management purposes, the `enqueue_once()` method allows a job to be scheduled exactly once avoiding duplicates. If a job is already scheduled for a particular instance, a second one won't be scheduled, respecting thread safety. An example use case would be to schedule a periodic task that is bound to an instance in general, but not to any event of that instance (such as updates). The parameters of the `enqueue_once()` method are identical to those of `enqueue()`.

@@ -73,9 +80,10 @@ As described above, jobs can be scheduled for immediate execution or at any late

 from django.db import models
 from core.choices import JobIntervalChoices
 from netbox.models import NetBoxModel
+from netbox.models.features import JobsMixin
 from .jobs import MyTestJob

-class MyModel(NetBoxModel):
+class MyModel(JobsMixin, NetBoxModel):
     foo = models.CharField()

     def save(self, *args, **kwargs):
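The example above is cut off at `save()`. As a sketch of how `enqueue_once()` might be called for an instance, reusing `MyTestJob` and `JobIntervalChoices` from the surrounding example, with the exact call site and interval being assumptions:

```python
# Sketch: schedule MyTestJob for this instance exactly once, e.g. a daily
# re-sync bound to the object rather than to any particular change event.
from core.choices import JobIntervalChoices

from .jobs import MyTestJob


def schedule_daily_sync(instance):
    # enqueue_once() skips scheduling if an identical job is already queued
    MyTestJob.enqueue_once(instance=instance, interval=JobIntervalChoices.INTERVAL_DAILY)
```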
@@ -55,6 +55,27 @@ class MyModelViewSet(...):
    filterset_class = filtersets.MyModelFilterSet
```

### Implementing Quick Search

The `ObjectListView` has a field called Quick Search. For Quick Search to work, the corresponding FilterSet has to override the `search` method implemented in `NetBoxModelFilterSet`. This method takes a queryset, can perform arbitrary operations on it, and returns it. A common use case is to search for the given value in multiple fields:

```python
from django.db.models import Q
from netbox.filtersets import NetBoxModelFilterSet

class MyFilterSet(NetBoxModelFilterSet):
    ...

    def search(self, queryset, name, value):
        if not value.strip():
            return queryset
        return queryset.filter(
            Q(name__icontains=value) |
            Q(description__icontains=value)
        )
```

The `search` method is also used by the `q` filter in `NetBoxModelFilterSet`, which in turn is used by the Search field in the filters tab.

## Filter Classes

### TagFilter
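As a quick illustration of how this is exercised, the sketch below continues the hypothetical `MyModel`/`MyFilterSet` from the snippet above; filtering through the `q` parameter runs the same `search()` code path as the Quick Search box in the UI.

```python
# Sketch: MyModel and MyFilterSet are the hypothetical classes defined above.
queryset = MyModel.objects.all()
matches = MyFilterSet({"q": "core-switch"}, queryset=queryset).qs
print(matches.count())
```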
@@ -1,5 +1,94 @@
# NetBox v4.4

## v4.4.7 (2025-11-25)

### Enhancements

* [#20371](https://github.com/netbox-community/netbox/issues/20371) - Add Molex Micro-Fit 2x3 for power ports & power outlets
* [#20731](https://github.com/netbox-community/netbox/issues/20731) - Enable specifying `data_source` & `data_file` when bulk importing config templates
* [#20820](https://github.com/netbox-community/netbox/issues/20820) - Enable filtering of custom fields by object type
* [#20823](https://github.com/netbox-community/netbox/issues/20823) - Disallow creation of API tokens with an expiration date in the past
* [#20841](https://github.com/netbox-community/netbox/issues/20841) - Support advanced filtering for available rack types when creating/editing a rack

### Bug Fixes

* [#20134](https://github.com/netbox-community/netbox/issues/20134) - Prevent out-of-band HTMX content swaps in embedded tables
* [#20432](https://github.com/netbox-community/netbox/issues/20432) - Fix tracing of cables across multiple circuits in parallel
* [#20465](https://github.com/netbox-community/netbox/issues/20465) - Ensure that scripts are updated immediately when a new file is uploaded
* [#20638](https://github.com/netbox-community/netbox/issues/20638) - Correct OpenAPI schema for bulk create operations
* [#20649](https://github.com/netbox-community/netbox/issues/20649) - Enforce view permissions on REST API endpoint for custom scripts
* [#20740](https://github.com/netbox-community/netbox/issues/20740) - Ensure permissions constraints are enforced when executing custom scripts via the REST API
* [#20743](https://github.com/netbox-community/netbox/issues/20743) - Pass request context to custom script when triggered by an event rule
* [#20766](https://github.com/netbox-community/netbox/issues/20766) - Fix inadvertent translations on server error page
* [#20775](https://github.com/netbox-community/netbox/issues/20775) - Fix `TypeError` exception when bulk renaming unnamed devices
* [#20822](https://github.com/netbox-community/netbox/issues/20822) - Add missing `auto_sync_enabled` field in bulk edit forms
* [#20827](https://github.com/netbox-community/netbox/issues/20827) - Fix UI styling issue when toggling between light and dark mode
* [#20839](https://github.com/netbox-community/netbox/issues/20839) - Fix filtering by object type in UI for custom links and saved filters
* [#20840](https://github.com/netbox-community/netbox/issues/20840) - Remove extraneous references to airflow for RackType model
* [#20844](https://github.com/netbox-community/netbox/issues/20844) - Fix object type filter for L2VPN terminations
* [#20859](https://github.com/netbox-community/netbox/issues/20859) - Prevent dashboard crash due to exception raised by a widget
* [#20865](https://github.com/netbox-community/netbox/issues/20865) - Enforce proper min/max values for latitude & longitude fields

---

## v4.4.6 (2025-11-11)

### Enhancements

* [#14171](https://github.com/netbox-community/netbox/issues/14171) - Support VLAN assignment for device & VM interfaces being bulk imported
* [#20297](https://github.com/netbox-community/netbox/issues/20297) - Introduce additional coaxial cable types

### Bug Fixes

* [#20378](https://github.com/netbox-community/netbox/issues/20378) - Prevent exception when attempting to delete a data source utilized by a custom script
* [#20645](https://github.com/netbox-community/netbox/issues/20645) - CSVChoiceField should defer to model field's default value when CSV field is empty
* [#20647](https://github.com/netbox-community/netbox/issues/20647) - Improve handling of empty strings during bulk imports
* [#20653](https://github.com/netbox-community/netbox/issues/20653) - Fix filtering of jobs by object type ID
* [#20660](https://github.com/netbox-community/netbox/issues/20660) - Optimize loading of custom script modules from remote storage
* [#20670](https://github.com/netbox-community/netbox/issues/20670) - Improve validation of related objects during bulk import
* [#20688](https://github.com/netbox-community/netbox/issues/20688) - Suppress non-harmful "No active configuration revision found" warning message
* [#20697](https://github.com/netbox-community/netbox/issues/20697) - Prevent duplication of signals which increment/decrement related object counts
* [#20699](https://github.com/netbox-community/netbox/issues/20699) - Ensure proper ordering of changelog entries resulting from cascading deletions
* [#20713](https://github.com/netbox-community/netbox/issues/20713) - Ensure a pre-change snapshot is recorded on virtual chassis members being added/removed
* [#20721](https://github.com/netbox-community/netbox/issues/20721) - Fix breadcrumb navigation links in UI for background tasks
* [#20738](https://github.com/netbox-community/netbox/issues/20738) - Deleting a virtual chassis should nullify the `vc_position` of all former members
* [#20750](https://github.com/netbox-community/netbox/issues/20750) - Fix cloning of permissions when only one action is enabled
* [#20755](https://github.com/netbox-community/netbox/issues/20755) - Prevent duplicate results under certain conditions when filtering providers
* [#20771](https://github.com/netbox-community/netbox/issues/20771) - Comments are required when creating a new journal entry
* [#20774](https://github.com/netbox-community/netbox/issues/20774) - Bulk action button labels should be translated

---

## v4.4.5 (2025-10-28)

### Enhancements

* [#19751](https://github.com/netbox-community/netbox/issues/19751) - Disable occupied module bays in form dropdowns when installing a new module
* [#20301](https://github.com/netbox-community/netbox/issues/20301) - Add a "dismiss all" option to the notifications dropdown
* [#20399](https://github.com/netbox-community/netbox/issues/20399) - Add `assigned` and `primary` boolean filters for MAC addresses
* [#20567](https://github.com/netbox-community/netbox/issues/20567) - Add contacts column to services table
* [#20675](https://github.com/netbox-community/netbox/issues/20675) - Enable [NetBox Copilot](https://netboxlabs.com/products/netbox-copilot/) integration
* [#20692](https://github.com/netbox-community/netbox/issues/20692) - Add contacts column to IP addresses table
* [#20700](https://github.com/netbox-community/netbox/issues/20700) - Add contacts table column for various additional models

### Bug Fixes

* [#19872](https://github.com/netbox-community/netbox/issues/19872) - Ensure custom script validation failures display error messages
* [#20389](https://github.com/netbox-community/netbox/issues/20389) - Fix "select all" behavior for bulk rename views
* [#20422](https://github.com/netbox-community/netbox/issues/20422) - Enable filtering of aggregates and prefixes by family in GraphQL API
* [#20459](https://github.com/netbox-community/netbox/issues/20459) - Fix validation of `is_oob` & `is_primary` fields under IP address bulk import
* [#20466](https://github.com/netbox-community/netbox/issues/20466) - Fix querying of devices with a primary IP assigned in GraphQL API
* [#20498](https://github.com/netbox-community/netbox/issues/20498) - Enforce the validation regex (if set) for custom URL fields
* [#20524](https://github.com/netbox-community/netbox/issues/20524) - Raise a validation error when attempting to schedule a custom script for a past date/time
* [#20541](https://github.com/netbox-community/netbox/issues/20541) - Fix resolution of GraphQL object fields which rely on custom filters
* [#20551](https://github.com/netbox-community/netbox/issues/20551) - Fix automatic slug generation in quick-add UI form
* [#20606](https://github.com/netbox-community/netbox/issues/20606) - Enable copying of values from table columns rendered as badges
* [#20641](https://github.com/netbox-community/netbox/issues/20641) - Fix `AttributeError` exception raised by the object changes REST API endpoint
* [#20646](https://github.com/netbox-community/netbox/issues/20646) - Prevent cables from connecting to objects marked as connected
* [#20655](https://github.com/netbox-community/netbox/issues/20655) - Fix `FieldError` exception when attempting to sort permissions list by actions

---

## v4.4.4 (2025-10-15)

### Bug Fixes
@@ -89,8 +89,6 @@ class ProviderFilterSet(NetBoxModelFilterSet, ContactModelFilterSet):
        return queryset.filter(
            Q(name__icontains=value) |
            Q(description__icontains=value) |
            Q(accounts__account__icontains=value) |
            Q(accounts__name__icontains=value) |
            Q(comments__icontains=value)
        )
@@ -83,6 +83,7 @@ class ProviderBulkEditView(generic.BulkEditView):

@register_model_view(Provider, 'bulk_rename', path='rename', detail=False)
class ProviderBulkRenameView(generic.BulkRenameView):
    queryset = Provider.objects.all()
+    filterset = filtersets.ProviderFilterSet


@register_model_view(Provider, 'bulk_delete', path='delete', detail=False)
@@ -150,6 +151,7 @@ class ProviderAccountBulkEditView(generic.BulkEditView):

@register_model_view(ProviderAccount, 'bulk_rename', path='rename', detail=False)
class ProviderAccountBulkRenameView(generic.BulkRenameView):
    queryset = ProviderAccount.objects.all()
+    filterset = filtersets.ProviderAccountFilterSet


@register_model_view(ProviderAccount, 'bulk_delete', path='delete', detail=False)
@@ -226,6 +228,7 @@ class ProviderNetworkBulkEditView(generic.BulkEditView):

@register_model_view(ProviderNetwork, 'bulk_rename', path='rename', detail=False)
class ProviderNetworkBulkRenameView(generic.BulkRenameView):
    queryset = ProviderNetwork.objects.all()
+    filterset = filtersets.ProviderNetworkFilterSet


@register_model_view(ProviderNetwork, 'bulk_delete', path='delete', detail=False)
@@ -290,6 +293,7 @@ class CircuitTypeBulkEditView(generic.BulkEditView):

@register_model_view(CircuitType, 'bulk_rename', path='rename', detail=False)
class CircuitTypeBulkRenameView(generic.BulkRenameView):
    queryset = CircuitType.objects.all()
+    filterset = filtersets.CircuitTypeFilterSet


@register_model_view(CircuitType, 'bulk_delete', path='delete', detail=False)
@@ -362,6 +366,7 @@ class CircuitBulkEditView(generic.BulkEditView):

class CircuitBulkRenameView(generic.BulkRenameView):
    queryset = Circuit.objects.all()
    field_name = 'cid'
+    filterset = filtersets.CircuitFilterSet


@register_model_view(Circuit, 'bulk_delete', path='delete', detail=False)
@@ -557,6 +562,7 @@ class CircuitGroupBulkEditView(generic.BulkEditView):

@register_model_view(CircuitGroup, 'bulk_rename', path='rename', detail=False)
class CircuitGroupBulkRenameView(generic.BulkRenameView):
    queryset = CircuitGroup.objects.all()
+    filterset = filtersets.CircuitGroupFilterSet


@register_model_view(CircuitGroup, 'bulk_delete', path='delete', detail=False)
@@ -672,6 +678,7 @@ class VirtualCircuitTypeBulkEditView(generic.BulkEditView):

@register_model_view(VirtualCircuitType, 'bulk_rename', path='rename', detail=False)
class VirtualCircuitTypeBulkRenameView(generic.BulkRenameView):
    queryset = VirtualCircuitType.objects.all()
+    filterset = filtersets.VirtualCircuitTypeFilterSet


@register_model_view(VirtualCircuitType, 'bulk_delete', path='delete', detail=False)
@@ -744,6 +751,7 @@ class VirtualCircuitBulkEditView(generic.BulkEditView):

class VirtualCircuitBulkRenameView(generic.BulkRenameView):
    queryset = VirtualCircuit.objects.all()
    field_name = 'cid'
+    filterset = filtersets.VirtualCircuitFilterSet


@register_model_view(VirtualCircuit, 'bulk_delete', path='delete', detail=False)
@@ -12,6 +12,7 @@ from drf_spectacular.utils import Direction

from netbox.api.fields import ChoiceField
from netbox.api.serializers import WritableNestedSerializer
+from netbox.api.viewsets import NetBoxModelViewSet

# see netbox.api.routers.NetBoxRouter
BULK_ACTIONS = ("bulk_destroy", "bulk_partial_update", "bulk_update")
@@ -49,6 +50,11 @@ class ChoiceFieldFix(OpenApiSerializerFieldExtension):
        )


+def viewset_handles_bulk_create(view):
+    """Check if view automatically provides list-based bulk create"""
+    return isinstance(view, NetBoxModelViewSet)
+
+
class NetBoxAutoSchema(AutoSchema):
    """
    Overrides to drf_spectacular.openapi.AutoSchema to fix following issues:
@@ -128,6 +134,36 @@ class NetBoxAutoSchema(AutoSchema):

        return response_serializers

+    def _get_request_for_media_type(self, serializer, direction='request'):
+        """
+        Override to generate oneOf schema for serializers that support both
+        single object and array input (NetBoxModelViewSet POST operations).
+
+        Refs: #20638
+        """
+        # Get the standard schema first
+        schema, required = super()._get_request_for_media_type(serializer, direction)
+
+        # If this serializer supports arrays (marked in get_request_serializer),
+        # wrap the schema in oneOf to allow single object OR array
+        if (
+            direction == 'request' and
+            schema is not None and
+            getattr(self.view, 'action', None) == 'create' and
+            viewset_handles_bulk_create(self.view)
+        ):
+            return {
+                'oneOf': [
+                    schema,  # Single object
+                    {
+                        'type': 'array',
+                        'items': schema,  # Array of objects
+                    }
+                ]
+            }, required
+
+        return schema, required
+
    def _get_serializer_name(self, serializer, direction, bypass_extensions=False) -> str:
        name = super()._get_serializer_name(serializer, direction, bypass_extensions)
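For reference, the behavior this override documents can be exercised directly against the API. The sketch below shows the single-object and array request forms the resulting `oneOf` schema describes (refs #20638); hostname, token, and payloads are illustrative.

```python
# Sketch: the same create endpoint accepts either one object or a list of
# objects, which is what the oneOf request schema above expresses.
import requests

headers = {"Authorization": "Token 0123456789abcdef0123456789abcdef01234567"}
url = "https://netbox.example.com/api/dcim/sites/"

single = {"name": "Site A", "slug": "site-a"}   # single object
bulk = [
    {"name": "Site B", "slug": "site-b"},       # array of objects
    {"name": "Site C", "slug": "site-c"},
]

requests.post(url, json=single, headers=headers, timeout=30)
requests.post(url, json=bulk, headers=headers, timeout=30)
```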
@@ -1,8 +1,13 @@
from drf_spectacular.utils import extend_schema_field
from rest_framework import serializers

from core.choices import *
from core.models import Job
from netbox.api.exceptions import SerializerNotFound
from netbox.api.fields import ChoiceField, ContentTypeField
from netbox.api.serializers import BaseModelSerializer
from users.api.serializers_.users import UserSerializer
from utilities.api import get_serializer_for_model

__all__ = (
    'JobSerializer',
@@ -18,11 +23,28 @@ class JobSerializer(BaseModelSerializer):
    object_type = ContentTypeField(
        read_only=True
    )
    object = serializers.SerializerMethodField(
        read_only=True
    )

    class Meta:
        model = Job
        fields = [
-            'id', 'url', 'display_url', 'display', 'object_type', 'object_id', 'name', 'status', 'created', 'scheduled',
-            'interval', 'started', 'completed', 'user', 'data', 'error', 'job_id', 'log_entries',
+            'id', 'url', 'display_url', 'display', 'object_type', 'object_id', 'object', 'name', 'status', 'created',
+            'scheduled', 'interval', 'started', 'completed', 'user', 'data', 'error', 'job_id', 'log_entries',
        ]
        brief_fields = ('url', 'created', 'completed', 'user', 'status')

    @extend_schema_field(serializers.JSONField(allow_null=True))
    def get_object(self, obj):
        """
        Serialize a nested representation of the object.
        """
        if obj.object is None:
            return None
        try:
            serializer = get_serializer_for_model(obj.object)
        except SerializerNotFound:
            return obj.object_repr
        context = {'request': self.context['request']}
        return serializer(obj.object, nested=True, context=context).data
@@ -80,6 +80,10 @@ class JobFilterSet(BaseFilterSet):
        method='search',
        label=_('Search'),
    )
+    object_type_id = django_filters.ModelMultipleChoiceFilter(
+        queryset=ObjectType.objects.with_feature('jobs'),
+        field_name='object_type_id',
+    )
    object_type = ContentTypeFilter()
    created = django_filters.DateTimeFilter()
    created__before = django_filters.DateTimeFilter(
@@ -124,7 +128,7 @@ class JobFilterSet(BaseFilterSet):

    class Meta:
        model = Job
-        fields = ('id', 'object_type', 'object_id', 'name', 'interval', 'status', 'user', 'job_id')
+        fields = ('id', 'object_type', 'object_type_id', 'object_id', 'name', 'interval', 'status', 'user', 'job_id')

    def search(self, queryset, name, value):
        if not value.strip():
@@ -70,13 +70,13 @@ class JobFilterForm(SavedFiltersMixin, FilterForm):
    model = Job
    fieldsets = (
        FieldSet('q', 'filter_id'),
-        FieldSet('object_type', 'status', name=_('Attributes')),
+        FieldSet('object_type_id', 'status', name=_('Attributes')),
        FieldSet(
            'created__before', 'created__after', 'scheduled__before', 'scheduled__after', 'started__before',
            'started__after', 'completed__before', 'completed__after', 'user', name=_('Creation')
        ),
    )
-    object_type = ContentTypeChoiceField(
+    object_type_id = ContentTypeChoiceField(
        label=_('Object Type'),
        queryset=ObjectType.objects.with_feature('jobs'),
        required=False,
@@ -166,8 +166,8 @@ class ConfigRevisionForm(forms.ModelForm, metaclass=ConfigFormMetaclass):
        FieldSet('CUSTOM_VALIDATORS', 'PROTECTION_RULES', name=_('Validation')),
        FieldSet('DEFAULT_USER_PREFERENCES', name=_('User Preferences')),
        FieldSet(
-            'MAINTENANCE_MODE', 'GRAPHQL_ENABLED', 'CHANGELOG_RETENTION', 'JOB_RETENTION', 'MAPS_URL',
-            name=_('Miscellaneous')
+            'MAINTENANCE_MODE', 'COPILOT_ENABLED', 'GRAPHQL_ENABLED', 'CHANGELOG_RETENTION', 'JOB_RETENTION',
+            'MAPS_URL', name=_('Miscellaneous'),
        ),
        FieldSet('comment', name=_('Config Revision'))
    )
@@ -6,7 +6,6 @@ from django.conf import settings
from django.core.exceptions import ValidationError
from django.db import models
from django.core.files.storage import storages
from django.urls import reverse
from django.utils.translation import gettext as _

from ..choices import ManagedFileRootPathChoices
@@ -64,9 +63,6 @@ class ManagedFile(SyncedDataMixin, models.Model):
    def __str__(self):
        return self.name

-    def get_absolute_url(self):
-        return reverse('core:managedfile', args=[self.pk])
-
    @property
    def name(self):
        return self.file_path
@@ -1,4 +1,4 @@
-from django.utils.translation import gettext as _
+from django.utils.translation import gettext_lazy as _

from netbox.object_actions import ObjectAction
@@ -3,6 +3,7 @@ from threading import local

from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ObjectDoesNotExist, ValidationError
+from django.db.models import CASCADE
from django.db.models.fields.reverse_related import ManyToManyRel, ManyToOneRel
from django.db.models.signals import m2m_changed, post_migrate, post_save, pre_delete
from django.dispatch import receiver, Signal
@@ -220,14 +221,8 @@ def handle_deleted_object(sender, instance, **kwargs):
            obj.snapshot()  # Ensure the change record includes the "before" state
            if type(relation) is ManyToManyRel:
                getattr(obj, related_field_name).remove(instance)
-            elif type(relation) is ManyToOneRel and relation.field.null is True:
+            elif type(relation) is ManyToOneRel and relation.null and relation.on_delete is not CASCADE:
                setattr(obj, related_field_name, None)
-            # make sure the object hasn't been deleted - in case of
-            # deletion chaining of related objects
-            try:
-                obj.refresh_from_db()
-            except DoesNotExist:
-                continue
            obj.save()

    # Enqueue the object for event processing
@@ -5,14 +5,16 @@ from rest_framework import status

from core.choices import ObjectChangeActionChoices
from core.models import ObjectChange, ObjectType
-from dcim.choices import SiteStatusChoices
-from dcim.models import Site, CableTermination, Device, DeviceType, DeviceRole, Interface, Cable
+from dcim.choices import InterfaceTypeChoices, ModuleStatusChoices, SiteStatusChoices
+from dcim.models import (
+    Cable, CableTermination, Device, DeviceRole, DeviceType, Manufacturer, Module, ModuleBay, ModuleType, Interface,
+    Site,
+)
from extras.choices import *
from extras.models import CustomField, CustomFieldChoiceSet, Tag
from utilities.testing import APITestCase
-from utilities.testing.utils import create_tags, post_data
+from utilities.testing.utils import create_tags, create_test_device, post_data
from utilities.testing.views import ModelViewTestCase
-from dcim.models import Manufacturer


class ChangeLogViewTest(ModelViewTestCase):
@@ -622,3 +624,64 @@ class ChangeLogAPITest(APITestCase):
        self.assertEqual(objectchange.prechange_data['name'], 'Site 1')
        self.assertEqual(objectchange.prechange_data['slug'], 'site-1')
        self.assertEqual(objectchange.postchange_data, None)

    def test_deletion_ordering(self):
        """
        Check that the cascading deletion of dependent objects is recorded in the correct order.
        """
        device = create_test_device('device1')
        module_bay = ModuleBay.objects.create(device=device, name='Module Bay 1')
        module_type = ModuleType.objects.create(manufacturer=Manufacturer.objects.first(), model='Module Type 1')
        self.add_permissions('dcim.add_module', 'dcim.add_interface', 'dcim.delete_module')
        self.assertEqual(ObjectChange.objects.count(), 0)  # Sanity check

        # Create a new Module
        data = {
            'device': device.pk,
            'module_bay': module_bay.pk,
            'module_type': module_type.pk,
            'status': ModuleStatusChoices.STATUS_ACTIVE,
        }
        url = reverse('dcim-api:module-list')
        response = self.client.post(url, data, format='json', **self.header)
        self.assertHttpStatus(response, status.HTTP_201_CREATED)
        module = device.modules.first()

        # Create an Interface on the Module
        data = {
            'device': device.pk,
            'module': module.pk,
            'name': 'Interface 1',
            'type': InterfaceTypeChoices.TYPE_1GE_FIXED,
        }
        url = reverse('dcim-api:interface-list')
        response = self.client.post(url, data, format='json', **self.header)
        self.assertHttpStatus(response, status.HTTP_201_CREATED)
        interface = device.interfaces.first()

        # Delete the Module
        url = reverse('dcim-api:module-detail', kwargs={'pk': module.pk})
        response = self.client.delete(url, **self.header)
        self.assertHttpStatus(response, status.HTTP_204_NO_CONTENT)
        self.assertEqual(Module.objects.count(), 0)
        self.assertEqual(Interface.objects.count(), 0)

        # Verify the creation of the expected ObjectChange records. We should see four total records, in this order:
        #   1. Module created
        #   2. Interface created
        #   3. Interface deleted
        #   4. Module deleted
        changes = ObjectChange.objects.order_by('time')
        self.assertEqual(len(changes), 4)
        self.assertEqual(changes[0].changed_object_type, ContentType.objects.get_for_model(Module))
        self.assertEqual(changes[0].changed_object_id, module.pk)
        self.assertEqual(changes[0].action, ObjectChangeActionChoices.ACTION_CREATE)
        self.assertEqual(changes[1].changed_object_type, ContentType.objects.get_for_model(Interface))
        self.assertEqual(changes[1].changed_object_id, interface.pk)
        self.assertEqual(changes[1].action, ObjectChangeActionChoices.ACTION_CREATE)
        self.assertEqual(changes[2].changed_object_type, ContentType.objects.get_for_model(Interface))
        self.assertEqual(changes[2].changed_object_id, interface.pk)
        self.assertEqual(changes[2].action, ObjectChangeActionChoices.ACTION_DELETE)
        self.assertEqual(changes[3].changed_object_type, ContentType.objects.get_for_model(Module))
        self.assertEqual(changes[3].changed_object_id, module.pk)
        self.assertEqual(changes[3].action, ObjectChangeActionChoices.ACTION_DELETE)
108 netbox/core/tests/test_openapi_schema.py Normal file
@@ -0,0 +1,108 @@
"""
Unit tests for OpenAPI schema generation.

Refs: #20638
"""
import json

from django.test import TestCase


class OpenAPISchemaTestCase(TestCase):
    """Tests for OpenAPI schema generation."""

    def setUp(self):
        """Fetch schema via API endpoint."""
        response = self.client.get('/api/schema/', {'format': 'json'})
        self.assertEqual(response.status_code, 200)
        self.schema = json.loads(response.content)

    def test_post_operation_documents_single_or_array(self):
        """
        POST operations on NetBoxModelViewSet endpoints should document
        support for both single objects and arrays via oneOf.

        Refs: #20638
        """
        # Test representative endpoints across different apps
        test_paths = [
            '/api/core/data-sources/',
            '/api/dcim/sites/',
            '/api/users/users/',
            '/api/ipam/ip-addresses/',
        ]

        for path in test_paths:
            with self.subTest(path=path):
                operation = self.schema['paths'][path]['post']

                # Get the request body schema
                request_schema = operation['requestBody']['content']['application/json']['schema']

                # Should have oneOf with two options
                self.assertIn('oneOf', request_schema, f"POST {path} should have oneOf schema")
                self.assertEqual(
                    len(request_schema['oneOf']), 2,
                    f"POST {path} oneOf should have exactly 2 options"
                )

                # First option: single object (has $ref or properties)
                single_schema = request_schema['oneOf'][0]
                self.assertTrue(
                    '$ref' in single_schema or 'properties' in single_schema,
                    f"POST {path} first oneOf option should be single object"
                )

                # Second option: array of objects
                array_schema = request_schema['oneOf'][1]
                self.assertEqual(
                    array_schema['type'], 'array',
                    f"POST {path} second oneOf option should be array"
                )
                self.assertIn('items', array_schema, f"POST {path} array should have items")

    def test_bulk_update_operations_require_array_only(self):
        """
        Bulk update/patch operations should require arrays only, not oneOf.
        They don't support single object input.

        Refs: #20638
        """
        test_paths = [
            '/api/dcim/sites/',
            '/api/users/users/',
        ]

        for path in test_paths:
            for method in ['put', 'patch']:
                with self.subTest(path=path, method=method):
                    operation = self.schema['paths'][path][method]
                    request_schema = operation['requestBody']['content']['application/json']['schema']

                    # Should be array-only, not oneOf
                    self.assertNotIn(
                        'oneOf', request_schema,
                        f"{method.upper()} {path} should NOT have oneOf (array-only)"
                    )
                    self.assertEqual(
                        request_schema['type'], 'array',
                        f"{method.upper()} {path} should require array"
                    )
                    self.assertIn(
                        'items', request_schema,
                        f"{method.upper()} {path} array should have items"
                    )

    def test_bulk_delete_requires_array(self):
        """
        Bulk delete operations should require arrays.

        Refs: #20638
        """
        path = '/api/dcim/sites/'
        operation = self.schema['paths'][path]['delete']
        request_schema = operation['requestBody']['content']['application/json']['schema']

        # Should be array-only
        self.assertNotIn('oneOf', request_schema, "DELETE should NOT have oneOf")
        self.assertEqual(request_schema['type'], 'array', "DELETE should require array")
        self.assertIn('items', request_schema, "DELETE array should have items")
@@ -125,6 +125,7 @@ class DataSourceBulkEditView(generic.BulkEditView):

@register_model_view(DataSource, 'bulk_rename', path='rename', detail=False)
class DataSourceBulkRenameView(generic.BulkRenameView):
    queryset = DataSource.objects.all()
+    filterset = filtersets.DataSourceFilterSet


@register_model_view(DataSource, 'bulk_delete', path='delete', detail=False)
@@ -461,6 +461,7 @@ class PowerPortTypeChoices(ChoiceSet):
    # Molex
    TYPE_MOLEX_MICRO_FIT_1X2 = 'molex-micro-fit-1x2'
    TYPE_MOLEX_MICRO_FIT_2X2 = 'molex-micro-fit-2x2'
+    TYPE_MOLEX_MICRO_FIT_2X3 = 'molex-micro-fit-2x3'
    TYPE_MOLEX_MICRO_FIT_2X4 = 'molex-micro-fit-2x4'
    # Direct current (DC)
    TYPE_DC = 'dc-terminal'
@@ -588,6 +589,7 @@ class PowerPortTypeChoices(ChoiceSet):
        ('Molex', (
            (TYPE_MOLEX_MICRO_FIT_1X2, 'Molex Micro-Fit 1x2'),
            (TYPE_MOLEX_MICRO_FIT_2X2, 'Molex Micro-Fit 2x2'),
+            (TYPE_MOLEX_MICRO_FIT_2X3, 'Molex Micro-Fit 2x3'),
            (TYPE_MOLEX_MICRO_FIT_2X4, 'Molex Micro-Fit 2x4'),
        )),
        ('DC', (
@@ -710,6 +712,7 @@ class PowerOutletTypeChoices(ChoiceSet):
    # Molex
    TYPE_MOLEX_MICRO_FIT_1X2 = 'molex-micro-fit-1x2'
    TYPE_MOLEX_MICRO_FIT_2X2 = 'molex-micro-fit-2x2'
+    TYPE_MOLEX_MICRO_FIT_2X3 = 'molex-micro-fit-2x3'
    TYPE_MOLEX_MICRO_FIT_2X4 = 'molex-micro-fit-2x4'
    # Direct current (DC)
    TYPE_DC = 'dc-terminal'
@@ -831,6 +834,7 @@ class PowerOutletTypeChoices(ChoiceSet):
        ('Molex', (
            (TYPE_MOLEX_MICRO_FIT_1X2, 'Molex Micro-Fit 1x2'),
            (TYPE_MOLEX_MICRO_FIT_2X2, 'Molex Micro-Fit 2x2'),
+            (TYPE_MOLEX_MICRO_FIT_2X3, 'Molex Micro-Fit 2x3'),
            (TYPE_MOLEX_MICRO_FIT_2X4, 'Molex Micro-Fit 2x4'),
        )),
        ('DC', (
@@ -1736,6 +1740,15 @@ class CableTypeChoices(ChoiceSet):

    # Copper - Coaxial
    TYPE_COAXIAL = 'coaxial'
+    TYPE_RG_6 = 'rg-6'
+    TYPE_RG_8 = 'rg-8'
+    TYPE_RG_11 = 'rg-11'
+    TYPE_RG_59 = 'rg-59'
+    TYPE_RG_62 = 'rg-62'
+    TYPE_RG_213 = 'rg-213'
+    TYPE_LMR_100 = 'lmr-100'
+    TYPE_LMR_200 = 'lmr-200'
+    TYPE_LMR_400 = 'lmr-400'

    # Fiber Optic - Multimode
    TYPE_MMF = 'mmf'
@@ -1785,6 +1798,15 @@ class CableTypeChoices(ChoiceSet):
        _('Copper - Coaxial'),
        (
            (TYPE_COAXIAL, 'Coaxial'),
+            (TYPE_RG_6, 'RG-6'),
+            (TYPE_RG_8, 'RG-8'),
+            (TYPE_RG_11, 'RG-11'),
+            (TYPE_RG_59, 'RG-59'),
+            (TYPE_RG_62, 'RG-62'),
+            (TYPE_RG_213, 'RG-213'),
+            (TYPE_LMR_100, 'LMR-100'),
+            (TYPE_LMR_200, 'LMR-200'),
+            (TYPE_LMR_400, 'LMR-400'),
        ),
    ),
    (
@@ -14,16 +14,16 @@ from netbox.filtersets import (
    AttributeFiltersMixin, BaseFilterSet, ChangeLoggedModelFilterSet, NestedGroupModelFilterSet, NetBoxModelFilterSet,
    OrganizationalModelFilterSet,
)
-from tenancy.filtersets import TenancyFilterSet, ContactModelFilterSet
+from tenancy.filtersets import ContactModelFilterSet, TenancyFilterSet
from tenancy.models import *
from users.models import User
from utilities.filters import (
    ContentTypeFilter, MultiValueCharFilter, MultiValueMACAddressFilter, MultiValueNumberFilter, MultiValueWWNFilter,
    NumericArrayFilter, TreeNodeMultipleChoiceFilter,
)
-from virtualization.models import Cluster, ClusterGroup, VMInterface, VirtualMachine
+from virtualization.models import Cluster, ClusterGroup, VirtualMachine, VMInterface
from vpn.models import L2VPN
-from wireless.choices import WirelessRoleChoices, WirelessChannelChoices
+from wireless.choices import WirelessChannelChoices, WirelessRoleChoices
from wireless.models import WirelessLAN, WirelessLink
from .choices import *
from .constants import *
@@ -1288,7 +1288,6 @@ class DeviceFilterSet(
            Q(name__icontains=value) |
            Q(virtual_chassis__name__icontains=value) |
            Q(serial__icontains=value.strip()) |
            Q(inventoryitems__serial__icontains=value.strip()) |
            Q(asset_tag__icontains=value.strip()) |
            Q(description__icontains=value.strip()) |
            Q(comments__icontains=value) |
@@ -1807,6 +1806,14 @@ class MACAddressFilterSet(NetBoxModelFilterSet):
        queryset=VMInterface.objects.all(),
        label=_('VM interface (ID)'),
    )
+    assigned = django_filters.BooleanFilter(
+        method='filter_assigned',
+        label=_('Is assigned'),
+    )
+    primary = django_filters.BooleanFilter(
+        method='filter_primary',
+        label=_('Is primary'),
+    )

    class Meta:
        model = MACAddress
@@ -1843,6 +1850,29 @@ class MACAddressFilterSet(NetBoxModelFilterSet):
            vminterface__in=interface_ids
        )

+    def filter_assigned(self, queryset, name, value):
+        params = {
+            'assigned_object_type__isnull': True,
+            'assigned_object_id__isnull': True,
+        }
+        if value:
+            return queryset.exclude(**params)
+        else:
+            return queryset.filter(**params)
+
+    def filter_primary(self, queryset, name, value):
+        interface_mac_ids = Interface.objects.filter(primary_mac_address_id__isnull=False).values_list(
+            'primary_mac_address_id', flat=True
+        )
+        vminterface_mac_ids = VMInterface.objects.filter(primary_mac_address_id__isnull=False).values_list(
+            'primary_mac_address_id', flat=True
+        )
+        query = Q(pk__in=interface_mac_ids) | Q(pk__in=vminterface_mac_ids)
+        if value:
+            return queryset.filter(query)
+        else:
+            return queryset.exclude(query)
+

class CommonInterfaceFilterSet(django_filters.FilterSet):
    mode = django_filters.MultipleChoiceFilter(
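The `assigned` and `primary` filters added above correspond to release note #20399 earlier in this compare. A hedged usage sketch against the REST API follows; the hostname and token are placeholders, and the endpoint path is assumed to follow the usual dcim API layout.

```python
# Sketch: list MAC addresses that are assigned to an interface but are not the
# primary MAC of that interface. Parameter names come from the filterset above.
import requests

headers = {"Authorization": "Token 0123456789abcdef0123456789abcdef01234567"}
response = requests.get(
    "https://netbox.example.com/api/dcim/mac-addresses/",
    params={"assigned": "true", "primary": "false"},
    headers=headers,
    timeout=30,
)
for mac in response.json()["results"]:
    print(mac["mac_address"])
```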
@@ -9,7 +9,8 @@ from dcim.choices import *
|
||||
from dcim.constants import *
|
||||
from dcim.models import *
|
||||
from extras.models import ConfigTemplate
|
||||
from ipam.models import VRF, IPAddress
|
||||
from ipam.choices import VLANQinQRoleChoices
|
||||
from ipam.models import VLAN, VRF, IPAddress, VLANGroup
|
||||
from netbox.choices import *
|
||||
from netbox.forms import NetBoxModelImportForm
|
||||
from tenancy.models import Tenant
|
||||
@@ -17,7 +18,7 @@ from utilities.forms.fields import (
|
||||
CSVChoiceField, CSVContentTypeField, CSVModelChoiceField, CSVModelMultipleChoiceField, CSVTypedChoiceField,
|
||||
SlugField,
|
||||
)
|
||||
from virtualization.models import Cluster, VMInterface, VirtualMachine
|
||||
from virtualization.models import Cluster, VirtualMachine, VMInterface
|
||||
from wireless.choices import WirelessRoleChoices
|
||||
from .common import ModuleCommonForm
|
||||
|
||||
@@ -471,14 +472,30 @@ class ModuleTypeImportForm(NetBoxModelImportForm):
|
||||
required=False,
|
||||
help_text=_('Unit for module weight')
|
||||
)
|
||||
attribute_data = forms.JSONField(
|
||||
label=_('Attributes'),
|
||||
required=False,
|
||||
help_text=_('Attribute values for the assigned profile, passed as a dictionary')
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = ModuleType
|
||||
fields = [
|
||||
'manufacturer', 'model', 'part_number', 'description', 'airflow', 'weight', 'weight_unit', 'profile',
|
||||
'comments', 'tags'
|
||||
'attribute_data', 'comments', 'tags',
|
||||
]
|
||||
|
||||
def clean(self):
|
||||
super().clean()
|
||||
|
||||
# Attribute data may be included only if a profile is specified
|
||||
if self.cleaned_data.get('attribute_data') and not self.cleaned_data.get('profile'):
|
||||
raise forms.ValidationError(_("Profile must be specified if attribute data is provided."))
|
||||
|
||||
# Default attribute_data to an empty dictionary if a profile is specified (to enforce schema validation)
|
||||
if self.cleaned_data.get('profile') and not self.cleaned_data.get('attribute_data'):
|
||||
self.cleaned_data['attribute_data'] = {}
|
||||
|
||||
|
||||
class DeviceRoleImportForm(NetBoxModelImportForm):
|
||||
parent = CSVModelChoiceField(
|
||||
@@ -938,7 +955,7 @@ class InterfaceImportForm(NetBoxModelImportForm):
|
||||
required=False,
|
||||
to_field_name='name',
|
||||
help_text=mark_safe(
|
||||
_('VDC names separated by commas, encased with double quotes. Example:') + ' <code>vdc1,vdc2,vdc3</code>'
|
||||
_('VDC names separated by commas, encased with double quotes. Example:') + ' <code>"vdc1,vdc2,vdc3"</code>'
|
||||
)
|
||||
)
|
||||
type = CSVChoiceField(
|
||||
@@ -967,7 +984,41 @@ class InterfaceImportForm(NetBoxModelImportForm):
        label=_('Mode'),
        choices=InterfaceModeChoices,
        required=False,
        help_text=_('IEEE 802.1Q operational mode (for L2 interfaces)')
        help_text=_('IEEE 802.1Q operational mode (for L2 interfaces)'),
    )
    vlan_group = CSVModelChoiceField(
        label=_('VLAN group'),
        queryset=VLANGroup.objects.all(),
        required=False,
        to_field_name='name',
        help_text=_('Filter VLANs available for assignment by group'),
    )
    untagged_vlan = CSVModelChoiceField(
        label=_('Untagged VLAN'),
        queryset=VLAN.objects.all(),
        required=False,
        to_field_name='vid',
        help_text=_('Assigned untagged VLAN ID (filtered by VLAN group)'),
    )
    tagged_vlans = CSVModelMultipleChoiceField(
        label=_('Tagged VLANs'),
        queryset=VLAN.objects.all(),
        required=False,
        to_field_name='vid',
        help_text=mark_safe(
            _(
                'Assigned tagged VLAN IDs separated by commas, encased with double quotes '
                '(filtered by VLAN group). Example:'
            )
            + ' <code>"100,200,300"</code>'
        ),
    )
    qinq_svlan = CSVModelChoiceField(
        label=_('Q-in-Q Service VLAN'),
        queryset=VLAN.objects.filter(qinq_role=VLANQinQRoleChoices.ROLE_SERVICE),
        required=False,
        to_field_name='vid',
        help_text=_('Assigned Q-in-Q Service VLAN ID (filtered by VLAN group)'),
    )
    vrf = CSVModelChoiceField(
        label=_('VRF'),
@@ -988,7 +1039,8 @@ class InterfaceImportForm(NetBoxModelImportForm):
        fields = (
            'device', 'name', 'label', 'parent', 'bridge', 'lag', 'type', 'speed', 'duplex', 'enabled',
            'mark_connected', 'wwn', 'vdcs', 'mtu', 'mgmt_only', 'description', 'poe_mode', 'poe_type', 'mode',
            'vrf', 'rf_role', 'rf_channel', 'rf_channel_frequency', 'rf_channel_width', 'tx_power', 'tags'
            'vlan_group', 'untagged_vlan', 'tagged_vlans', 'qinq_svlan', 'vrf', 'rf_role', 'rf_channel',
            'rf_channel_frequency', 'rf_channel_width', 'tx_power', 'tags'
        )

    def __init__(self, data=None, *args, **kwargs):
@@ -1005,6 +1057,13 @@ class InterfaceImportForm(NetBoxModelImportForm):
            self.fields['lag'].queryset = self.fields['lag'].queryset.filter(**params)
            self.fields['vdcs'].queryset = self.fields['vdcs'].queryset.filter(**params)

        # Limit choices for VLANs to the assigned VLAN group
        if vlan_group := data.get('vlan_group'):
            params = {f"group__{self.fields['vlan_group'].to_field_name}": vlan_group}
            self.fields['untagged_vlan'].queryset = self.fields['untagged_vlan'].queryset.filter(**params)
            self.fields['tagged_vlans'].queryset = self.fields['tagged_vlans'].queryset.filter(**params)
            self.fields['qinq_svlan'].queryset = self.fields['qinq_svlan'].queryset.filter(**params)

    def clean_enabled(self):
        # Make sure enabled is True when it's not included in the uploaded data
        if 'enabled' not in self.data:
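
A hypothetical CSV snippet (not taken from the changeset) showing how the new vlan_group, untagged_vlan and tagged_vlans columns defined above might be populated; the device, group and VID values are invented:

    # Header plus one data row for the interface bulk import, as a Python string.
    csv_data = "\n".join([
        "device,name,type,mode,vlan_group,untagged_vlan,tagged_vlans",
        'Device 1,eth0,1000base-t,tagged,Campus VLANs,100,"200,300"',
    ])
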
@@ -278,11 +278,6 @@ class RackBaseFilterForm(NetBoxModelFilterSetForm):
|
||||
choices=BOOLEAN_WITH_BLANK_CHOICES
|
||||
)
|
||||
)
|
||||
airflow = forms.MultipleChoiceField(
|
||||
label=_('Airflow'),
|
||||
choices=add_blank_choice(RackAirflowChoices),
|
||||
required=False
|
||||
)
|
||||
weight = forms.DecimalField(
|
||||
label=_('Weight'),
|
||||
required=False,
|
||||
@@ -381,6 +376,11 @@ class RackFilterForm(TenancyFilterForm, ContactModelFilterForm, RackBaseFilterFo
|
||||
},
|
||||
label=_('Rack type')
|
||||
)
|
||||
airflow = forms.MultipleChoiceField(
|
||||
label=_('Airflow'),
|
||||
choices=add_blank_choice(RackAirflowChoices),
|
||||
required=False
|
||||
)
|
||||
serial = forms.CharField(
|
||||
label=_('Serial'),
|
||||
required=False
|
||||
@@ -1676,12 +1676,16 @@ class MACAddressFilterForm(NetBoxModelFilterSetForm):
|
||||
model = MACAddress
|
||||
fieldsets = (
|
||||
FieldSet('q', 'filter_id', 'tag'),
|
||||
FieldSet('mac_address', 'device_id', 'virtual_machine_id', name=_('MAC address')),
|
||||
FieldSet('mac_address', name=_('Attributes')),
|
||||
FieldSet(
|
||||
'device_id', 'virtual_machine_id', 'assigned', 'primary',
|
||||
name=_('Assignments'),
|
||||
),
|
||||
)
|
||||
selector_fields = ('filter_id', 'q', 'device_id', 'virtual_machine_id')
|
||||
mac_address = forms.CharField(
|
||||
required=False,
|
||||
label=_('MAC address')
|
||||
label=_('MAC address'),
|
||||
)
|
||||
device_id = DynamicModelMultipleChoiceField(
|
||||
queryset=Device.objects.all(),
|
||||
@@ -1693,6 +1697,20 @@ class MACAddressFilterForm(NetBoxModelFilterSetForm):
|
||||
required=False,
|
||||
label=_('Assigned VM'),
|
||||
)
|
||||
assigned = forms.NullBooleanField(
|
||||
required=False,
|
||||
label=_('Assigned to an interface'),
|
||||
widget=forms.Select(
|
||||
choices=BOOLEAN_WITH_BLANK_CHOICES
|
||||
),
|
||||
)
|
||||
primary = forms.NullBooleanField(
|
||||
required=False,
|
||||
label=_('Primary MAC of an interface'),
|
||||
widget=forms.Select(
|
||||
choices=BOOLEAN_WITH_BLANK_CHOICES
|
||||
),
|
||||
)
|
||||
tag = TagFilterField(model)
|
||||
|
||||
|
||||
|
||||
@@ -269,7 +269,8 @@ class RackForm(TenancyForm, NetBoxModelForm):
|
||||
label=_('Rack Type'),
|
||||
queryset=RackType.objects.all(),
|
||||
required=False,
|
||||
help_text=_("Select a pre-defined rack type, or set physical characteristics below.")
|
||||
selector=True,
|
||||
help_text=_("Select a pre-defined rack type, or set physical characteristics below."),
|
||||
)
|
||||
comments = CommentField()
|
||||
|
||||
@@ -755,7 +756,10 @@ class ModuleForm(ModuleCommonForm, NetBoxModelForm):
|
||||
queryset=ModuleBay.objects.all(),
|
||||
query_params={
|
||||
'device_id': '$device'
|
||||
}
|
||||
},
|
||||
context={
|
||||
'disabled': 'installed_module',
|
||||
},
|
||||
)
|
||||
module_type = DynamicModelChoiceField(
|
||||
label=_('Module type'),
|
||||
|
||||
@@ -453,6 +453,7 @@ class VirtualChassisCreateForm(NetBoxModelForm):
|
||||
if instance.pk and self.cleaned_data['members']:
|
||||
initial_position = self.cleaned_data.get('initial_position', 1)
|
||||
for i, member in enumerate(self.cleaned_data['members'], start=initial_position):
|
||||
member.snapshot()
|
||||
member.virtual_chassis = instance
|
||||
member.vc_position = i
|
||||
member.save()
|
||||
|
||||
@@ -18,7 +18,9 @@ from netbox.graphql.filter_mixins import (
|
||||
ImageAttachmentFilterMixin,
|
||||
WeightFilterMixin,
|
||||
)
|
||||
from tenancy.graphql.filter_mixins import TenancyFilterMixin, ContactFilterMixin
|
||||
from tenancy.graphql.filter_mixins import ContactFilterMixin, TenancyFilterMixin
|
||||
from virtualization.models import VMInterface
|
||||
|
||||
from .filter_mixins import (
|
||||
CabledObjectModelFilterMixin,
|
||||
ComponentModelFilterMixin,
|
||||
@@ -419,6 +421,24 @@ class MACAddressFilter(PrimaryModelFilterMixin):
    )
    assigned_object_id: ID | None = strawberry_django.filter_field()

    @strawberry_django.filter_field()
    def assigned(self, value: bool, prefix) -> Q:
        return Q(**{f'{prefix}assigned_object_id__isnull': (not value)})

    @strawberry_django.filter_field()
    def primary(self, value: bool, prefix) -> Q:
        interface_mac_ids = models.Interface.objects.filter(primary_mac_address_id__isnull=False).values_list(
            'primary_mac_address_id', flat=True
        )
        vminterface_mac_ids = VMInterface.objects.filter(primary_mac_address_id__isnull=False).values_list(
            'primary_mac_address_id', flat=True
        )
        query = Q(**{f'{prefix}pk__in': interface_mac_ids}) | Q(**{f'{prefix}pk__in': vminterface_mac_ids})
        if value:
            return Q(query)
        else:
            return ~Q(query)


@strawberry_django.filter_type(models.Interface, lookups=True)
class InterfaceFilter(ModularComponentModelFilterMixin, InterfaceBaseFilterMixin, CabledObjectModelFilterMixin):
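
A hedged sketch of how the two GraphQL filters added above might be queried; the query root name (mac_address_list) and the response fields are assumptions about the generated schema -- only the assigned/primary filter names come from the hunk itself:

    # Illustrative only -- not part of the changeset.
    graphql_query = """
    query {
      mac_address_list(filters: {assigned: true, primary: true}) {
        id
        mac_address
      }
    }
    """
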
netbox/dcim/migrations/0216_latitude_longitude_validators.py (new file, 69 additions)
@@ -0,0 +1,69 @@
|
||||
import decimal
|
||||
|
||||
import django.core.validators
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
('dcim', '0215_rackreservation_status'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='device',
|
||||
name='latitude',
|
||||
field=models.DecimalField(
|
||||
blank=True,
|
||||
decimal_places=6,
|
||||
max_digits=8,
|
||||
null=True,
|
||||
validators=[
|
||||
django.core.validators.MinValueValidator(decimal.Decimal('-90.0')),
|
||||
django.core.validators.MaxValueValidator(decimal.Decimal('90.0'))
|
||||
],
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='device',
|
||||
name='longitude',
|
||||
field=models.DecimalField(
|
||||
blank=True,
|
||||
decimal_places=6,
|
||||
max_digits=9,
|
||||
null=True,
|
||||
validators=[
|
||||
django.core.validators.MinValueValidator(decimal.Decimal('-180.0')),
|
||||
django.core.validators.MaxValueValidator(decimal.Decimal('180.0'))
|
||||
],
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='site',
|
||||
name='latitude',
|
||||
field=models.DecimalField(
|
||||
blank=True,
|
||||
decimal_places=6,
|
||||
max_digits=8,
|
||||
null=True,
|
||||
validators=[
|
||||
django.core.validators.MinValueValidator(decimal.Decimal('-90.0')),
|
||||
django.core.validators.MaxValueValidator(decimal.Decimal('90.0'))
|
||||
],
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='site',
|
||||
name='longitude',
|
||||
field=models.DecimalField(
|
||||
blank=True,
|
||||
decimal_places=6,
|
||||
max_digits=9,
|
||||
null=True,
|
||||
validators=[
|
||||
django.core.validators.MinValueValidator(decimal.Decimal('-180.0')),
|
||||
django.core.validators.MaxValueValidator(decimal.Decimal('180.0'))
|
||||
],
|
||||
),
|
||||
),
|
||||
]
|
||||
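
A small standalone sketch (illustrative, not part of the migration) of what the added validators enforce -- a latitude outside [-90, 90] or a longitude outside [-180, 180] now fails model validation:

    from decimal import Decimal

    from django.core.exceptions import ValidationError
    from django.core.validators import MaxValueValidator, MinValueValidator

    latitude_validators = [
        MinValueValidator(Decimal('-90.0')),
        MaxValueValidator(Decimal('90.0')),
    ]

    def latitude_is_valid(value):
        # Mirrors what Device.full_clean() / Site.full_clean() will do once the
        # field validators from this migration are in place.
        for validator in latitude_validators:
            try:
                validator(Decimal(str(value)))
            except ValidationError:
                return False
        return True

    print(latitude_is_valid(42.5))     # True
    print(latitude_is_valid(123.456))  # False: above the 90.0 maximum
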
@@ -10,6 +10,7 @@ from django.utils.translation import gettext_lazy as _
|
||||
from core.models import ObjectType
|
||||
from dcim.choices import *
|
||||
from dcim.constants import *
|
||||
from dcim.exceptions import UnsupportedCablePath
|
||||
from dcim.fields import PathField
|
||||
from dcim.utils import decompile_path_node, object_to_path_node
|
||||
from netbox.choices import ColorChoices
|
||||
@@ -28,8 +29,6 @@ __all__ = (
|
||||
'CableTermination',
|
||||
)
|
||||
|
||||
from ..exceptions import UnsupportedCablePath
|
||||
|
||||
trace_paths = Signal()
|
||||
|
||||
|
||||
@@ -393,6 +392,17 @@ class CableTermination(ChangeLoggedModel):
    def clean(self):
        super().clean()

        # Disallow connecting a cable to any termination object that is
        # explicitly flagged as "mark connected".
        termination = getattr(self, 'termination', None)
        if termination is not None and getattr(termination, "mark_connected", False):
            raise ValidationError(
                _("Cannot connect a cable to {obj_parent} > {obj} because it is marked as connected.").format(
                    obj_parent=termination.parent_object,
                    obj=termination,
                )
            )

        # Check for existing termination
        qs = CableTermination.objects.filter(
            termination_type=self.termination_type,
@@ -404,14 +414,14 @@ class CableTermination(ChangeLoggedModel):
        existing_termination = qs.first()
        if existing_termination is not None:
            raise ValidationError(
                _("Duplicate termination found for {app_label}.{model} {termination_id}: cable {cable_pk}".format(
                _("Duplicate termination found for {app_label}.{model} {termination_id}: cable {cable_pk}").format(
                    app_label=self.termination_type.app_label,
                    model=self.termination_type.model,
                    termination_id=self.termination_id,
                    cable_pk=existing_termination.cable.pk
                ))
                )
            )
        # Validate interface type (if applicable)
        # Validate the interface type (if applicable)
        if self.termination_type.model == 'interface' and self.termination.type in NONCONNECTABLE_IFACE_TYPES:
            raise ValidationError(
                _("Cables cannot be terminated to {type_display} interfaces").format(
@@ -604,7 +614,7 @@ class CablePath(models.Model):
|
||||
Cable or WirelessLink connects (interfaces, console ports, circuit termination, etc.). All terminations must be
|
||||
of the same type and must belong to the same parent object.
|
||||
"""
|
||||
from circuits.models import CircuitTermination
|
||||
from circuits.models import CircuitTermination, Circuit
|
||||
|
||||
if not terminations:
|
||||
return None
|
||||
@@ -626,8 +636,11 @@ class CablePath(models.Model):
|
||||
raise UnsupportedCablePath(_("All mid-span terminations must have the same termination type"))
|
||||
|
||||
# All mid-span terminations must all be attached to the same device
|
||||
if (not isinstance(terminations[0], PathEndpoint) and not
|
||||
all(t.parent_object == terminations[0].parent_object for t in terminations[1:])):
|
||||
if (
|
||||
not isinstance(terminations[0], PathEndpoint) and
|
||||
not isinstance(terminations[0].parent_object, Circuit) and
|
||||
not all(t.parent_object == terminations[0].parent_object for t in terminations[1:])
|
||||
):
|
||||
raise UnsupportedCablePath(_("All mid-span terminations must have the same parent object"))
|
||||
|
||||
# Check for a split path (e.g. rear port fanning out to multiple front ports with
|
||||
@@ -771,32 +784,39 @@ class CablePath(models.Model):
|
||||
|
||||
elif isinstance(remote_terminations[0], CircuitTermination):
|
||||
# Follow a CircuitTermination to its corresponding CircuitTermination (A to Z or vice versa)
|
||||
if len(remote_terminations) > 1:
|
||||
is_split = True
|
||||
qs = Q()
|
||||
for remote_termination in remote_terminations:
|
||||
qs |= Q(
|
||||
circuit=remote_termination.circuit,
|
||||
term_side='Z' if remote_termination.term_side == 'A' else 'A'
|
||||
)
|
||||
|
||||
# Get all circuit terminations
|
||||
circuit_terminations = CircuitTermination.objects.filter(qs)
|
||||
|
||||
if not circuit_terminations.exists():
|
||||
break
|
||||
circuit_termination = CircuitTermination.objects.filter(
|
||||
circuit=remote_terminations[0].circuit,
|
||||
term_side='Z' if remote_terminations[0].term_side == 'A' else 'A'
|
||||
).first()
|
||||
if circuit_termination is None:
|
||||
break
|
||||
elif circuit_termination._provider_network:
|
||||
elif all([ct._provider_network for ct in circuit_terminations]):
|
||||
# Circuit terminates to a ProviderNetwork
|
||||
path.extend([
|
||||
[object_to_path_node(circuit_termination)],
|
||||
[object_to_path_node(circuit_termination._provider_network)],
|
||||
[object_to_path_node(ct) for ct in circuit_terminations],
|
||||
[object_to_path_node(ct._provider_network) for ct in circuit_terminations],
|
||||
])
|
||||
is_complete = True
|
||||
break
|
||||
elif circuit_termination.termination and not circuit_termination.cable:
|
||||
elif all([ct.termination and not ct.cable for ct in circuit_terminations]):
|
||||
# Circuit terminates to a Region/Site/etc.
|
||||
path.extend([
|
||||
[object_to_path_node(circuit_termination)],
|
||||
[object_to_path_node(circuit_termination.termination)],
|
||||
[object_to_path_node(ct) for ct in circuit_terminations],
|
||||
[object_to_path_node(ct.termination) for ct in circuit_terminations],
|
||||
])
|
||||
break
|
||||
elif any([ct.cable in links for ct in circuit_terminations]):
|
||||
# No valid path
|
||||
is_split = True
|
||||
break
|
||||
|
||||
terminations = [circuit_termination]
|
||||
terminations = circuit_terminations
|
||||
|
||||
else:
|
||||
# Check for non-symmetric path
|
||||
|
||||
@@ -646,6 +646,10 @@ class Device(
|
||||
decimal_places=6,
|
||||
blank=True,
|
||||
null=True,
|
||||
validators=[
|
||||
MinValueValidator(decimal.Decimal('-90.0')),
|
||||
MaxValueValidator(decimal.Decimal('90.0'))
|
||||
],
|
||||
help_text=_("GPS coordinate in decimal format (xx.yyyyyy)")
|
||||
)
|
||||
longitude = models.DecimalField(
|
||||
@@ -654,6 +658,10 @@ class Device(
|
||||
decimal_places=6,
|
||||
blank=True,
|
||||
null=True,
|
||||
validators=[
|
||||
MinValueValidator(decimal.Decimal('-180.0')),
|
||||
MaxValueValidator(decimal.Decimal('180.0'))
|
||||
],
|
||||
help_text=_("GPS coordinate in decimal format (xx.yyyyyy)")
|
||||
)
|
||||
services = GenericRelation(
|
||||
@@ -1154,7 +1162,6 @@ class VirtualChassis(PrimaryModel):
|
||||
})
|
||||
|
||||
def delete(self, *args, **kwargs):
|
||||
|
||||
# Check for LAG interfaces split across member chassis
|
||||
interfaces = Interface.objects.filter(
|
||||
device__in=self.members.all(),
|
||||
@@ -1168,6 +1175,13 @@ class VirtualChassis(PrimaryModel):
|
||||
"interfaces."
|
||||
).format(self=self, interfaces=InterfaceSpeedChoices))
|
||||
|
||||
# Clear vc_position and vc_priority on member devices BEFORE calling super().delete()
|
||||
# This must be done here because on_delete=SET_NULL executes before pre_delete signal
|
||||
for device in self.members.all():
|
||||
device.vc_position = None
|
||||
device.vc_priority = None
|
||||
device.save()
|
||||
|
||||
return super().delete(*args, **kwargs)
|
||||
|
||||
|
||||
|
||||
@@ -1,5 +1,8 @@
|
||||
import decimal
|
||||
|
||||
from django.contrib.contenttypes.fields import GenericRelation
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.core.validators import MaxValueValidator, MinValueValidator
|
||||
from django.db import models
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from timezone_field import TimeZoneField
|
||||
@@ -210,6 +213,10 @@ class Site(ContactsMixin, ImageAttachmentsMixin, PrimaryModel):
|
||||
decimal_places=6,
|
||||
blank=True,
|
||||
null=True,
|
||||
validators=[
|
||||
MinValueValidator(decimal.Decimal('-90.0')),
|
||||
MaxValueValidator(decimal.Decimal('90.0'))
|
||||
],
|
||||
help_text=_('GPS coordinate in decimal format (xx.yyyyyy)')
|
||||
)
|
||||
longitude = models.DecimalField(
|
||||
@@ -218,6 +225,10 @@ class Site(ContactsMixin, ImageAttachmentsMixin, PrimaryModel):
|
||||
decimal_places=6,
|
||||
blank=True,
|
||||
null=True,
|
||||
validators=[
|
||||
MinValueValidator(decimal.Decimal('-180.0')),
|
||||
MaxValueValidator(decimal.Decimal('180.0'))
|
||||
],
|
||||
help_text=_('GPS coordinate in decimal format (xx.yyyyyy)')
|
||||
)
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
from django.utils.translation import gettext as _
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from netbox.object_actions import ObjectAction
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import logging
|
||||
|
||||
from django.db.models.signals import post_save, post_delete, pre_delete
|
||||
from django.db.models.signals import post_save, post_delete
|
||||
from django.dispatch import receiver
|
||||
|
||||
from dcim.choices import CableEndChoices, LinkStatusChoices
|
||||
@@ -85,18 +85,6 @@ def assign_virtualchassis_master(instance, created, **kwargs):
|
||||
master.save()
|
||||
|
||||
|
||||
@receiver(pre_delete, sender=VirtualChassis)
|
||||
def clear_virtualchassis_members(instance, **kwargs):
|
||||
"""
|
||||
When a VirtualChassis is deleted, nullify the vc_position and vc_priority fields of its prior members.
|
||||
"""
|
||||
devices = Device.objects.filter(virtual_chassis=instance.pk)
|
||||
for device in devices:
|
||||
device.vc_position = None
|
||||
device.vc_priority = None
|
||||
device.save()
|
||||
|
||||
|
||||
#
|
||||
# Cables
|
||||
#
|
||||
|
||||
@@ -1174,6 +1174,9 @@ class MACAddressTable(NetBoxTable):
|
||||
orderable=False,
|
||||
verbose_name=_('Parent')
|
||||
)
|
||||
is_primary = columns.BooleanColumn(
|
||||
verbose_name=_('Primary')
|
||||
)
|
||||
tags = columns.TagColumn(
|
||||
url_name='dcim:macaddress_list'
|
||||
)
|
||||
@@ -1184,7 +1187,7 @@ class MACAddressTable(NetBoxTable):
|
||||
class Meta(DeviceComponentTable.Meta):
|
||||
model = models.MACAddress
|
||||
fields = (
|
||||
'pk', 'id', 'mac_address', 'assigned_object_parent', 'assigned_object', 'description', 'comments', 'tags',
|
||||
'created', 'last_updated',
|
||||
'pk', 'id', 'mac_address', 'assigned_object_parent', 'assigned_object', 'description', 'is_primary',
|
||||
'comments', 'tags', 'created', 'last_updated',
|
||||
)
|
||||
default_columns = ('pk', 'mac_address', 'assigned_object_parent', 'assigned_object', 'description')
|
||||
|
||||
@@ -100,7 +100,7 @@ class RackTypeTable(NetBoxTable):
|
||||
model = RackType
|
||||
fields = (
|
||||
'pk', 'id', 'model', 'manufacturer', 'form_factor', 'u_height', 'starting_unit', 'width', 'outer_width',
|
||||
'outer_height', 'outer_depth', 'mounting_depth', 'airflow', 'weight', 'max_weight', 'description',
|
||||
'outer_height', 'outer_depth', 'mounting_depth', 'weight', 'max_weight', 'description',
|
||||
'comments', 'instance_count', 'tags', 'created', 'last_updated',
|
||||
)
|
||||
default_columns = (
|
||||
|
||||
@@ -2270,6 +2270,80 @@ class CablePathTestCase(TestCase):
|
||||
CableTraceSVG(interface1).render()
|
||||
CableTraceSVG(interface2).render()
|
||||
|
||||
def test_223_interface_to_interface_via_multiple_circuit_terminations(self):
|
||||
provider = Provider.objects.first()
|
||||
circuit_type = CircuitType.objects.first()
|
||||
circuit1 = self.circuit
|
||||
circuit2 = Circuit.objects.create(provider=provider, type=circuit_type, cid='Circuit 2')
|
||||
interface1 = Interface.objects.create(device=self.device, name='Interface 1')
|
||||
interface2 = Interface.objects.create(device=self.device, name='Interface 2')
|
||||
circuittermination1_A = CircuitTermination.objects.create(
|
||||
circuit=circuit1,
|
||||
termination=self.site,
|
||||
term_side='A'
|
||||
)
|
||||
circuittermination1_Z = CircuitTermination.objects.create(
|
||||
circuit=circuit1,
|
||||
termination=self.site,
|
||||
term_side='Z'
|
||||
)
|
||||
circuittermination2_A = CircuitTermination.objects.create(
|
||||
circuit=circuit2,
|
||||
termination=self.site,
|
||||
term_side='A'
|
||||
)
|
||||
circuittermination2_Z = CircuitTermination.objects.create(
|
||||
circuit=circuit2,
|
||||
termination=self.site,
|
||||
term_side='Z'
|
||||
)
|
||||
|
||||
# Create cables
|
||||
cable1 = Cable(
|
||||
a_terminations=[interface1],
|
||||
b_terminations=[circuittermination1_A, circuittermination2_A]
|
||||
)
|
||||
cable2 = Cable(
|
||||
a_terminations=[interface2],
|
||||
b_terminations=[circuittermination1_Z, circuittermination2_Z]
|
||||
)
|
||||
cable1.save()
|
||||
cable2.save()
|
||||
|
||||
self.assertEqual(CablePath.objects.count(), 2)
|
||||
|
||||
path1 = self.assertPathExists(
|
||||
(
|
||||
interface1,
|
||||
cable1,
|
||||
(circuittermination1_A, circuittermination2_A),
|
||||
(circuittermination1_Z, circuittermination2_Z),
|
||||
cable2,
|
||||
interface2
|
||||
|
||||
),
|
||||
is_active=True,
|
||||
is_complete=True,
|
||||
)
|
||||
interface1.refresh_from_db()
|
||||
self.assertPathIsSet(interface1, path1)
|
||||
|
||||
path2 = self.assertPathExists(
|
||||
(
|
||||
interface2,
|
||||
cable2,
|
||||
(circuittermination1_Z, circuittermination2_Z),
|
||||
(circuittermination1_A, circuittermination2_A),
|
||||
cable1,
|
||||
interface1
|
||||
|
||||
),
|
||||
is_active=True,
|
||||
is_complete=True,
|
||||
)
|
||||
interface2.refresh_from_db()
|
||||
self.assertPathIsSet(interface2, path2)
|
||||
|
||||
def test_301_create_path_via_existing_cable(self):
|
||||
"""
|
||||
[IF1] --C1-- [FP1] [RP1] --C2-- [RP2] [FP2] --C3-- [IF2]
|
||||
@@ -2510,3 +2584,33 @@ class CablePathTestCase(TestCase):
|
||||
is_active=True
|
||||
)
|
||||
self.assertEqual(CablePath.objects.count(), 0)
|
||||
|
||||
def test_402_exclude_circuit_loopback(self):
|
||||
interface = Interface.objects.create(device=self.device, name='Interface 1')
|
||||
circuittermination1 = CircuitTermination.objects.create(
|
||||
circuit=self.circuit,
|
||||
termination=self.site,
|
||||
term_side='A'
|
||||
)
|
||||
circuittermination2 = CircuitTermination.objects.create(
|
||||
circuit=self.circuit,
|
||||
termination=self.site,
|
||||
term_side='Z'
|
||||
)
|
||||
|
||||
# Create cables
|
||||
cable = Cable(
|
||||
a_terminations=[interface],
|
||||
b_terminations=[circuittermination1, circuittermination2]
|
||||
)
|
||||
cable.save()
|
||||
|
||||
path = self.assertPathExists(
|
||||
(interface, cable, (circuittermination1, circuittermination2)),
|
||||
is_active=True,
|
||||
is_complete=False,
|
||||
is_split=True
|
||||
)
|
||||
self.assertEqual(CablePath.objects.count(), 1)
|
||||
interface.refresh_from_db()
|
||||
self.assertPathIsSet(interface, path)
|
||||
|
||||
@@ -10,7 +10,7 @@ from netbox.choices import ColorChoices, WeightUnitChoices
|
||||
from tenancy.models import Tenant, TenantGroup
|
||||
from users.models import User
|
||||
from utilities.testing import ChangeLoggedFilterSetTests, create_test_device, create_test_virtualmachine
|
||||
from virtualization.models import Cluster, ClusterType, ClusterGroup, VMInterface, VirtualMachine
|
||||
from virtualization.models import Cluster, ClusterGroup, ClusterType, VirtualMachine, VMInterface
|
||||
from wireless.choices import WirelessChannelChoices, WirelessRoleChoices
|
||||
from wireless.models import WirelessLink
|
||||
|
||||
@@ -7164,9 +7164,20 @@ class MACAddressTestCase(TestCase, ChangeLoggedFilterSetTests):
|
||||
MACAddress(mac_address='00-00-00-05-01-01', assigned_object=vm_interfaces[1]),
|
||||
MACAddress(mac_address='00-00-00-06-01-01', assigned_object=vm_interfaces[2]),
|
||||
MACAddress(mac_address='00-00-00-06-01-02', assigned_object=vm_interfaces[2]),
|
||||
# unassigned
|
||||
MACAddress(mac_address='00-00-00-07-01-01'),
|
||||
)
|
||||
MACAddress.objects.bulk_create(mac_addresses)
|
||||
|
||||
# Set MAC addresses as primary
|
||||
for idx, interface in enumerate(interfaces):
|
||||
interface.primary_mac_address = mac_addresses[idx]
|
||||
interface.save()
|
||||
for idx, vm_interface in enumerate(vm_interfaces):
|
||||
# Offset by 4 for device MACs
|
||||
vm_interface.primary_mac_address = mac_addresses[idx + 4]
|
||||
vm_interface.save()
|
||||
|
||||
def test_mac_address(self):
|
||||
params = {'mac_address': ['00-00-00-01-01-01', '00-00-00-02-01-01']}
|
||||
self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2)
|
||||
@@ -7198,3 +7209,15 @@ class MACAddressTestCase(TestCase, ChangeLoggedFilterSetTests):
|
||||
self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2)
|
||||
params = {'vminterface': [vm_interfaces[0].name, vm_interfaces[1].name]}
|
||||
self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2)
|
||||
|
||||
def test_assigned(self):
|
||||
params = {'assigned': True}
|
||||
self.assertEqual(self.filterset(params, self.queryset).qs.count(), 8)
|
||||
params = {'assigned': False}
|
||||
self.assertEqual(self.filterset(params, self.queryset).qs.count(), 1)
|
||||
|
||||
def test_primary(self):
|
||||
params = {'primary': True}
|
||||
self.assertEqual(self.filterset(params, self.queryset).qs.count(), 6)
|
||||
params = {'primary': False}
|
||||
self.assertEqual(self.filterset(params, self.queryset).qs.count(), 3)
|
||||
|
||||
@@ -967,6 +967,18 @@ class CableTestCase(TestCase):
|
||||
with self.assertRaises(ValidationError):
|
||||
cable.clean()
|
||||
|
||||
def test_cannot_cable_to_mark_connected(self):
|
||||
"""
|
||||
Test that a cable cannot be connected to an interface marked as connected.
|
||||
"""
|
||||
device1 = Device.objects.get(name='TestDevice1')
|
||||
interface1 = Interface.objects.get(device__name='TestDevice2', name='eth1')
|
||||
|
||||
mark_connected_interface = Interface(device=device1, name='mark_connected1', mark_connected=True)
|
||||
cable = Cable(a_terminations=[mark_connected_interface], b_terminations=[interface1])
|
||||
with self.assertRaises(ValidationError):
|
||||
cable.clean()
|
||||
|
||||
|
||||
class VirtualDeviceContextTestCase(TestCase):
|
||||
|
||||
@@ -1019,3 +1031,92 @@ class VirtualDeviceContextTestCase(TestCase):
|
||||
vdc2 = VirtualDeviceContext(device=device, name="VDC 2", identifier=1, status='active')
|
||||
with self.assertRaises(ValidationError):
|
||||
vdc2.full_clean()
|
||||
|
||||
|
||||
class VirtualChassisTestCase(TestCase):
|
||||
|
||||
@classmethod
|
||||
def setUpTestData(cls):
|
||||
site = Site.objects.create(name='Test Site 1', slug='test-site-1')
|
||||
manufacturer = Manufacturer.objects.create(name='Test Manufacturer 1', slug='test-manufacturer-1')
|
||||
devicetype = DeviceType.objects.create(
|
||||
manufacturer=manufacturer, model='Test Device Type 1', slug='test-device-type-1'
|
||||
)
|
||||
role = DeviceRole.objects.create(
|
||||
name='Test Device Role 1', slug='test-device-role-1', color='ff0000'
|
||||
)
|
||||
Device.objects.create(
|
||||
device_type=devicetype, role=role, name='TestDevice1', site=site
|
||||
)
|
||||
Device.objects.create(
|
||||
device_type=devicetype, role=role, name='TestDevice2', site=site
|
||||
)
|
||||
|
||||
def test_virtualchassis_deletion_clears_vc_position(self):
|
||||
"""
|
||||
Test that when a VirtualChassis is deleted, member devices have their
|
||||
vc_position and vc_priority fields set to None.
|
||||
"""
|
||||
devices = Device.objects.all()
|
||||
device1 = devices[0]
|
||||
device2 = devices[1]
|
||||
|
||||
# Create a VirtualChassis with two member devices
|
||||
vc = VirtualChassis.objects.create(name='Test VC', master=device1)
|
||||
|
||||
device1.virtual_chassis = vc
|
||||
device1.vc_position = 1
|
||||
device1.vc_priority = 10
|
||||
device1.save()
|
||||
|
||||
device2.virtual_chassis = vc
|
||||
device2.vc_position = 2
|
||||
device2.vc_priority = 20
|
||||
device2.save()
|
||||
|
||||
# Verify devices are members of the VC with positions set
|
||||
device1.refresh_from_db()
|
||||
device2.refresh_from_db()
|
||||
self.assertEqual(device1.virtual_chassis, vc)
|
||||
self.assertEqual(device1.vc_position, 1)
|
||||
self.assertEqual(device1.vc_priority, 10)
|
||||
self.assertEqual(device2.virtual_chassis, vc)
|
||||
self.assertEqual(device2.vc_position, 2)
|
||||
self.assertEqual(device2.vc_priority, 20)
|
||||
|
||||
# Delete the VirtualChassis
|
||||
vc.delete()
|
||||
|
||||
# Verify devices have vc_position and vc_priority set to None
|
||||
device1.refresh_from_db()
|
||||
device2.refresh_from_db()
|
||||
self.assertIsNone(device1.virtual_chassis)
|
||||
self.assertIsNone(device1.vc_position)
|
||||
self.assertIsNone(device1.vc_priority)
|
||||
self.assertIsNone(device2.virtual_chassis)
|
||||
self.assertIsNone(device2.vc_position)
|
||||
self.assertIsNone(device2.vc_priority)
|
||||
|
||||
def test_virtualchassis_duplicate_vc_position(self):
|
||||
"""
|
||||
Test that two devices cannot be assigned to the same vc_position
|
||||
within the same VirtualChassis.
|
||||
"""
|
||||
devices = Device.objects.all()
|
||||
device1 = devices[0]
|
||||
device2 = devices[1]
|
||||
|
||||
# Create a VirtualChassis
|
||||
vc = VirtualChassis.objects.create(name='Test VC')
|
||||
|
||||
# Assign first device to vc_position 1
|
||||
device1.virtual_chassis = vc
|
||||
device1.vc_position = 1
|
||||
device1.full_clean()
|
||||
device1.save()
|
||||
|
||||
# Try to assign second device to the same vc_position
|
||||
device2.virtual_chassis = vc
|
||||
device2.vc_position = 1
|
||||
with self.assertRaises(ValidationError):
|
||||
device2.full_clean()
|
||||
|
||||
@@ -986,6 +986,131 @@ inventory-items:
|
||||
ii1 = InventoryItemTemplate.objects.first()
|
||||
self.assertEqual(ii1.name, 'Inventory Item 1')
|
||||
|
||||
@override_settings(EXEMPT_VIEW_PERMISSIONS=['*'])
|
||||
def test_import_error_numbering(self):
|
||||
# Add all required permissions to the test user
|
||||
self.add_permissions(
|
||||
'dcim.view_devicetype',
|
||||
'dcim.add_devicetype',
|
||||
'dcim.add_consoleporttemplate',
|
||||
'dcim.add_consoleserverporttemplate',
|
||||
'dcim.add_powerporttemplate',
|
||||
'dcim.add_poweroutlettemplate',
|
||||
'dcim.add_interfacetemplate',
|
||||
'dcim.add_frontporttemplate',
|
||||
'dcim.add_rearporttemplate',
|
||||
'dcim.add_modulebaytemplate',
|
||||
'dcim.add_devicebaytemplate',
|
||||
'dcim.add_inventoryitemtemplate',
|
||||
)
|
||||
|
||||
import_data = '''
|
||||
---
|
||||
manufacturer: Manufacturer 1
|
||||
model: TEST-2001
|
||||
slug: test-2001
|
||||
u_height: 1
|
||||
module-bays:
|
||||
- name: Module Bay 1-1
|
||||
- name: Module Bay 1-2
|
||||
---
|
||||
- manufacturer: Manufacturer 1
|
||||
model: TEST-2002
|
||||
slug: test-2002
|
||||
u_height: 1
|
||||
module-bays:
|
||||
- name: Module Bay 2-1
|
||||
- name: Module Bay 2-2
|
||||
- not_name: Module Bay 2-3
|
||||
- manufacturer: Manufacturer 1
|
||||
model: TEST-2003
|
||||
slug: test-2003
|
||||
u_height: 1
|
||||
module-bays:
|
||||
- name: Module Bay 3-1
|
||||
'''
|
||||
form_data = {
|
||||
'data': import_data,
|
||||
'format': 'yaml'
|
||||
}
|
||||
|
||||
response = self.client.post(reverse('dcim:devicetype_bulk_import'), data=form_data, follow=True)
|
||||
self.assertHttpStatus(response, 200)
|
||||
self.assertContains(response, "Record 2 module-bays[3].name: This field is required.")
|
||||
|
||||
@override_settings(EXEMPT_VIEW_PERMISSIONS=['*'])
|
||||
def test_import_nolist(self):
|
||||
# Add all required permissions to the test user
|
||||
self.add_permissions(
|
||||
'dcim.view_devicetype',
|
||||
'dcim.add_devicetype',
|
||||
'dcim.add_consoleporttemplate',
|
||||
'dcim.add_consoleserverporttemplate',
|
||||
'dcim.add_powerporttemplate',
|
||||
'dcim.add_poweroutlettemplate',
|
||||
'dcim.add_interfacetemplate',
|
||||
'dcim.add_frontporttemplate',
|
||||
'dcim.add_rearporttemplate',
|
||||
'dcim.add_modulebaytemplate',
|
||||
'dcim.add_devicebaytemplate',
|
||||
'dcim.add_inventoryitemtemplate',
|
||||
)
|
||||
|
||||
for value in ('', 'null', '3', '"My console port"', '{name: "My other console port"}'):
|
||||
with self.subTest(value=value):
|
||||
import_data = f'''
|
||||
manufacturer: Manufacturer 1
|
||||
model: TEST-3000
|
||||
slug: test-3000
|
||||
u_height: 1
|
||||
console-ports: {value}
|
||||
'''
|
||||
form_data = {
|
||||
'data': import_data,
|
||||
'format': 'yaml'
|
||||
}
|
||||
|
||||
response = self.client.post(reverse('dcim:devicetype_bulk_import'), data=form_data, follow=True)
|
||||
self.assertHttpStatus(response, 200)
|
||||
self.assertContains(response, "Record 1 console-ports: Must be a list.")
|
||||
|
||||
@override_settings(EXEMPT_VIEW_PERMISSIONS=['*'])
|
||||
def test_import_nodict(self):
|
||||
# Add all required permissions to the test user
|
||||
self.add_permissions(
|
||||
'dcim.view_devicetype',
|
||||
'dcim.add_devicetype',
|
||||
'dcim.add_consoleporttemplate',
|
||||
'dcim.add_consoleserverporttemplate',
|
||||
'dcim.add_powerporttemplate',
|
||||
'dcim.add_poweroutlettemplate',
|
||||
'dcim.add_interfacetemplate',
|
||||
'dcim.add_frontporttemplate',
|
||||
'dcim.add_rearporttemplate',
|
||||
'dcim.add_modulebaytemplate',
|
||||
'dcim.add_devicebaytemplate',
|
||||
'dcim.add_inventoryitemtemplate',
|
||||
)
|
||||
|
||||
for value in ('', 'null', '3', '"My console port"', '["My other console port"]'):
|
||||
with self.subTest(value=value):
|
||||
import_data = f'''
|
||||
manufacturer: Manufacturer 1
|
||||
model: TEST-4000
|
||||
slug: test-4000
|
||||
u_height: 1
|
||||
console-ports:
|
||||
- {value}
|
||||
'''
|
||||
form_data = {
|
||||
'data': import_data,
|
||||
'format': 'yaml'
|
||||
}
|
||||
|
||||
response = self.client.post(reverse('dcim:devicetype_bulk_import'), data=form_data, follow=True)
|
||||
self.assertHttpStatus(response, 200)
|
||||
self.assertContains(response, "Record 1 console-ports[1]: Must be a dictionary.")
|
||||
|
||||
def test_export_objects(self):
|
||||
url = reverse('dcim:devicetype_list')
|
||||
self.add_permissions('dcim.view_devicetype')
|
||||
@@ -2834,10 +2959,19 @@ class InterfaceTestCase(ViewTestCases.DeviceComponentViewTestCase):
|
||||
}
|
||||
|
||||
cls.csv_data = (
|
||||
"device,name,type,vrf.pk,poe_mode,poe_type",
|
||||
f"Device 1,Interface 4,1000base-t,{vrfs[0].pk},pse,type1-ieee802.3af",
|
||||
f"Device 1,Interface 5,1000base-t,{vrfs[0].pk},pse,type1-ieee802.3af",
|
||||
f"Device 1,Interface 6,1000base-t,{vrfs[0].pk},pse,type1-ieee802.3af",
|
||||
"device,name,type,vrf.pk,poe_mode,poe_type,mode,untagged_vlan,tagged_vlans",
|
||||
(
|
||||
f"Device 1,Interface 4,1000base-t,{vrfs[0].pk},pse,type1-ieee802.3af,"
|
||||
f"tagged,{vlans[0].vid},'{','.join([str(v.vid) for v in vlans[1:4]])}'"
|
||||
),
|
||||
(
|
||||
f"Device 1,Interface 5,1000base-t,{vrfs[0].pk},pse,type1-ieee802.3af,"
|
||||
f"tagged,{vlans[0].vid},'{','.join([str(v.vid) for v in vlans[1:4]])}'"
|
||||
),
|
||||
(
|
||||
f"Device 1,Interface 6,1000base-t,{vrfs[0].pk},pse,type1-ieee802.3af,"
|
||||
f"tagged,{vlans[0].vid},'{','.join([str(v.vid) for v in vlans[1:4]])}'"
|
||||
),
|
||||
)
|
||||
|
||||
cls.csv_update_data = (
|
||||
@@ -2885,6 +3019,43 @@ class InterfaceTestCase(ViewTestCases.DeviceComponentViewTestCase):
|
||||
self.client.post(self._get_url('bulk_delete'), data)
|
||||
self.assertEqual(device.interfaces.count(), 4) # Child & parent were both deleted
|
||||
|
||||
def test_rename_select_all_spans_pages(self):
|
||||
"""
|
||||
Tests the bulk rename functionality for interfaces spanning multiple pages in the UI.
|
||||
"""
|
||||
device_name = 'DeviceRename'
|
||||
device = create_test_device(device_name)
|
||||
# Create > default page size (25) so selection spans multiple pages
|
||||
for i in range(37):
|
||||
Interface.objects.create(device=device, name=f'eth{i}')
|
||||
|
||||
self.add_permissions('dcim.change_interface')
|
||||
|
||||
# Filter to this device's interfaces to simulate a real list filter
|
||||
get_qs = {'device_id': Device.objects.get(name=device_name).pk}
|
||||
post_url = f'{self._get_url("bulk_rename")}?device_id={get_qs["device_id"]}'
|
||||
|
||||
# Preview step: ensure 37 selected (not just one page)
|
||||
data = {'_preview': '1', '_all': '1', 'find': 'eth', 'replace': 'xe'}
|
||||
response = self.client.post(post_url, data=data)
|
||||
self.assertHttpStatus(response, 200)
|
||||
self.assertEqual(len(response.context['selected_objects']), 37)
|
||||
|
||||
# Extract pk[] just like the browser would submit on Apply
|
||||
# (either from the form's initial, or from selected_objects)
|
||||
pk_list = response.context['form'].initial.get('pk')
|
||||
if not pk_list:
|
||||
pk_list = [obj.pk for obj in response.context['selected_objects']]
|
||||
pk_list = [str(pk) for pk in pk_list]
|
||||
|
||||
# Apply step: include pk[] in the POST
|
||||
apply_data = {'_apply': '1', '_all': '1', 'find': 'eth', 'replace': 'xe', 'pk': pk_list}
|
||||
response = self.client.post(post_url, data=apply_data)
|
||||
|
||||
# On success the view redirects back to the return URL
|
||||
self.assertHttpStatus(response, 302)
|
||||
self.assertEqual(Interface.objects.filter(device=device, name__startswith='xe').count(), 37)
|
||||
|
||||
|
||||
class FrontPortTestCase(ViewTestCases.DeviceComponentViewTestCase):
|
||||
model = FrontPort
|
||||
|
||||
@@ -295,6 +295,7 @@ class RegionBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(Region, 'bulk_rename', path='rename', detail=False)
|
||||
class RegionBulkRenameView(generic.BulkRenameView):
|
||||
queryset = Region.objects.all()
|
||||
filterset = filtersets.RegionFilterSet
|
||||
|
||||
|
||||
@register_model_view(Region, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -426,6 +427,7 @@ class SiteGroupBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(SiteGroup, 'bulk_rename', path='rename', detail=False)
|
||||
class SiteGroupBulkRenameView(generic.BulkRenameView):
|
||||
queryset = SiteGroup.objects.all()
|
||||
filterset = filtersets.SiteGroupFilterSet
|
||||
|
||||
|
||||
@register_model_view(SiteGroup, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -516,6 +518,7 @@ class SiteBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(Site, 'bulk_rename', path='rename', detail=False)
|
||||
class SiteBulkRenameView(generic.BulkRenameView):
|
||||
queryset = Site.objects.all()
|
||||
filterset = filtersets.SiteFilterSet
|
||||
|
||||
|
||||
@register_model_view(Site, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -625,6 +628,7 @@ class LocationBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(Location, 'bulk_rename', path='rename', detail=False)
|
||||
class LocationBulkRenameView(generic.BulkRenameView):
|
||||
queryset = Location.objects.all()
|
||||
filterset = filtersets.LocationFilterSet
|
||||
|
||||
|
||||
@register_model_view(Location, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -695,6 +699,7 @@ class RackRoleBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(RackRole, 'bulk_rename', path='rename', detail=False)
|
||||
class RackRoleBulkRenameView(generic.BulkRenameView):
|
||||
queryset = RackRole.objects.all()
|
||||
filterset = filtersets.RackRoleFilterSet
|
||||
|
||||
|
||||
@register_model_view(RackRole, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -760,6 +765,7 @@ class RackTypeBulkEditView(generic.BulkEditView):
|
||||
class RackTypeBulkRenameView(generic.BulkRenameView):
|
||||
queryset = RackType.objects.all()
|
||||
field_name = 'model'
|
||||
filterset = filtersets.RackTypeFilterSet
|
||||
|
||||
|
||||
@register_model_view(RackType, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -944,6 +950,7 @@ class RackBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(Rack, 'bulk_rename', path='rename', detail=False)
|
||||
class RackBulkRenameView(generic.BulkRenameView):
|
||||
queryset = Rack.objects.all()
|
||||
filterset = filtersets.RackFilterSet
|
||||
|
||||
|
||||
@register_model_view(Rack, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -1083,6 +1090,7 @@ class ManufacturerBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(Manufacturer, 'bulk_rename', path='rename', detail=False)
|
||||
class ManufacturerBulkRenameView(generic.BulkRenameView):
|
||||
queryset = Manufacturer.objects.all()
|
||||
filterset = filtersets.ManufacturerFilterSet
|
||||
|
||||
|
||||
@register_model_view(Manufacturer, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -1336,6 +1344,7 @@ class DeviceTypeBulkEditView(generic.BulkEditView):
|
||||
class DeviceTypeBulkRenameView(generic.BulkRenameView):
|
||||
queryset = DeviceType.objects.all()
|
||||
field_name = 'model'
|
||||
filterset = filtersets.DeviceTypeFilterSet
|
||||
|
||||
|
||||
@register_model_view(DeviceType, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -1397,6 +1406,7 @@ class ModuleTypeProfileBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(ModuleTypeProfile, 'bulk_rename', path='rename', detail=False)
|
||||
class ModuleTypeProfileBulkRenameView(generic.BulkRenameView):
|
||||
queryset = ModuleTypeProfile.objects.all()
|
||||
filterset = filtersets.ModuleTypeProfileFilterSet
|
||||
|
||||
|
||||
@register_model_view(ModuleTypeProfile, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -1612,6 +1622,7 @@ class ModuleTypeBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(ModuleType, 'bulk_rename', path='rename', detail=False)
|
||||
class ModuleTypeBulkRenameView(generic.BulkRenameView):
|
||||
queryset = ModuleType.objects.all()
|
||||
filterset = filtersets.ModuleTypeFilterSet
|
||||
|
||||
|
||||
@register_model_view(ModuleType, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -2100,6 +2111,7 @@ class DeviceRoleBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(DeviceRole, 'bulk_rename', path='rename', detail=False)
|
||||
class DeviceRoleBulkRenameView(generic.BulkRenameView):
|
||||
queryset = DeviceRole.objects.all()
|
||||
filterset = filtersets.DeviceRoleFilterSet
|
||||
|
||||
|
||||
@register_model_view(DeviceRole, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -2175,6 +2187,7 @@ class PlatformBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(Platform, 'bulk_rename', path='rename', detail=False)
|
||||
class PlatformBulkRenameView(generic.BulkRenameView):
|
||||
queryset = Platform.objects.all()
|
||||
filterset = filtersets.PlatformFilterSet
|
||||
|
||||
|
||||
@register_model_view(Platform, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -2582,6 +2595,7 @@ class ConsolePortBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(ConsolePort, 'bulk_rename', path='rename', detail=False)
|
||||
class ConsolePortBulkRenameView(generic.BulkRenameView):
|
||||
queryset = ConsolePort.objects.all()
|
||||
filterset = filtersets.ConsolePortFilterSet
|
||||
|
||||
|
||||
@register_model_view(ConsolePort, 'bulk_disconnect', path='disconnect', detail=False)
|
||||
@@ -2652,6 +2666,7 @@ class ConsoleServerPortBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(ConsoleServerPort, 'bulk_rename', path='rename', detail=False)
|
||||
class ConsoleServerPortBulkRenameView(generic.BulkRenameView):
|
||||
queryset = ConsoleServerPort.objects.all()
|
||||
filterset = filtersets.ConsoleServerPortFilterSet
|
||||
|
||||
|
||||
@register_model_view(ConsoleServerPort, 'bulk_disconnect', path='disconnect', detail=False)
|
||||
@@ -2722,6 +2737,7 @@ class PowerPortBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(PowerPort, 'bulk_rename', path='rename', detail=False)
|
||||
class PowerPortBulkRenameView(generic.BulkRenameView):
|
||||
queryset = PowerPort.objects.all()
|
||||
filterset = filtersets.PowerPortFilterSet
|
||||
|
||||
|
||||
@register_model_view(PowerPort, 'bulk_disconnect', path='disconnect', detail=False)
|
||||
@@ -2792,6 +2808,7 @@ class PowerOutletBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(PowerOutlet, 'bulk_rename', path='rename', detail=False)
|
||||
class PowerOutletBulkRenameView(generic.BulkRenameView):
|
||||
queryset = PowerOutlet.objects.all()
|
||||
filterset = filtersets.PowerOutletFilterSet
|
||||
|
||||
|
||||
@register_model_view(PowerOutlet, 'bulk_disconnect', path='disconnect', detail=False)
|
||||
@@ -2934,6 +2951,7 @@ class InterfaceBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(Interface, 'bulk_rename', path='rename', detail=False)
|
||||
class InterfaceBulkRenameView(generic.BulkRenameView):
|
||||
queryset = Interface.objects.all()
|
||||
filterset = filtersets.InterfaceFilterSet
|
||||
|
||||
|
||||
@register_model_view(Interface, 'bulk_disconnect', path='disconnect', detail=False)
|
||||
@@ -3005,6 +3023,7 @@ class FrontPortBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(FrontPort, 'bulk_rename', path='rename', detail=False)
|
||||
class FrontPortBulkRenameView(generic.BulkRenameView):
|
||||
queryset = FrontPort.objects.all()
|
||||
filterset = filtersets.FrontPortFilterSet
|
||||
|
||||
|
||||
@register_model_view(FrontPort, 'bulk_disconnect', path='disconnect', detail=False)
|
||||
@@ -3080,6 +3099,7 @@ class RearPortBulkRenameView(generic.BulkRenameView):
|
||||
@register_model_view(RearPort, 'bulk_disconnect', path='disconnect', detail=False)
|
||||
class RearPortBulkDisconnectView(BulkDisconnectView):
|
||||
queryset = RearPort.objects.all()
|
||||
filterset = filtersets.RearPortFilterSet
|
||||
|
||||
|
||||
@register_model_view(RearPort, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -3145,6 +3165,7 @@ class ModuleBayBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(ModuleBay, 'bulk_rename', path='rename', detail=False)
|
||||
class ModuleBayBulkRenameView(generic.BulkRenameView):
|
||||
queryset = ModuleBay.objects.all()
|
||||
filterset = filtersets.ModuleBayFilterSet
|
||||
|
||||
|
||||
@register_model_view(ModuleBay, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -3287,6 +3308,7 @@ class DeviceBayBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(DeviceBay, 'bulk_rename', path='rename', detail=False)
|
||||
class DeviceBayBulkRenameView(generic.BulkRenameView):
|
||||
queryset = DeviceBay.objects.all()
|
||||
filterset = filtersets.DeviceBayFilterSet
|
||||
|
||||
|
||||
@register_model_view(DeviceBay, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -3348,6 +3370,7 @@ class InventoryItemBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(InventoryItem, 'bulk_rename', path='rename', detail=False)
|
||||
class InventoryItemBulkRenameView(generic.BulkRenameView):
|
||||
queryset = InventoryItem.objects.all()
|
||||
filterset = filtersets.InventoryItemFilterSet
|
||||
|
||||
|
||||
@register_model_view(InventoryItem, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -3431,6 +3454,7 @@ class InventoryItemRoleBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(InventoryItemRole, 'bulk_rename', path='rename', detail=False)
|
||||
class InventoryItemRoleBulkRenameView(generic.BulkRenameView):
|
||||
queryset = InventoryItemRole.objects.all()
|
||||
filterset = filtersets.InventoryItemRoleFilterSet
|
||||
|
||||
|
||||
@register_model_view(InventoryItemRole, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -3634,6 +3658,7 @@ class CableBulkEditView(generic.BulkEditView):
|
||||
class CableBulkRenameView(generic.BulkRenameView):
|
||||
queryset = Cable.objects.all()
|
||||
field_name = 'label'
|
||||
filterset = filtersets.CableFilterSet
|
||||
|
||||
|
||||
@register_model_view(Cable, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -3754,6 +3779,7 @@ class VirtualChassisEditView(ObjectPermissionRequiredMixin, GetReturnURLMixin, V
|
||||
def post(self, request, pk):
|
||||
|
||||
virtual_chassis = get_object_or_404(self.queryset, pk=pk)
|
||||
virtual_chassis.snapshot()
|
||||
VCMemberFormSet = modelformset_factory(
|
||||
model=Device,
|
||||
form=forms.DeviceVCMembershipForm,
|
||||
@@ -3806,9 +3832,7 @@ class VirtualChassisAddMemberView(ObjectPermissionRequiredMixin, GetReturnURLMix
|
||||
return 'dcim.change_virtualchassis'
|
||||
|
||||
def get(self, request, pk):
|
||||
|
||||
virtual_chassis = get_object_or_404(self.queryset, pk=pk)
|
||||
|
||||
initial_data = {k: request.GET[k] for k in request.GET}
|
||||
member_select_form = forms.VCMemberSelectForm(initial=initial_data)
|
||||
membership_form = forms.DeviceVCMembershipForm(initial=initial_data)
|
||||
@@ -3821,20 +3845,20 @@ class VirtualChassisAddMemberView(ObjectPermissionRequiredMixin, GetReturnURLMix
|
||||
})
|
||||
|
||||
def post(self, request, pk):
|
||||
|
||||
virtual_chassis = get_object_or_404(self.queryset, pk=pk)
|
||||
|
||||
member_select_form = forms.VCMemberSelectForm(request.POST)
|
||||
|
||||
if member_select_form.is_valid():
|
||||
|
||||
device = member_select_form.cleaned_data['device']
|
||||
device.snapshot()
|
||||
device.virtual_chassis = virtual_chassis
|
||||
data = {k: request.POST[k] for k in ['vc_position', 'vc_priority']}
|
||||
data = {
|
||||
'vc_position': request.POST['vc_position'],
|
||||
'vc_priority': request.POST['vc_priority'],
|
||||
}
|
||||
membership_form = forms.DeviceVCMembershipForm(data=data, validate_vc_position=True, instance=device)
|
||||
|
||||
if membership_form.is_valid():
|
||||
|
||||
membership_form.save()
|
||||
messages.success(request, mark_safe(
|
||||
_('Added member <a href="{url}">{device}</a>').format(
|
||||
@@ -3844,11 +3868,9 @@ class VirtualChassisAddMemberView(ObjectPermissionRequiredMixin, GetReturnURLMix
|
||||
|
||||
if '_addanother' in request.POST and safe_for_redirect(request.get_full_path()):
|
||||
return redirect(request.get_full_path())
|
||||
|
||||
return redirect(self.get_return_url(request, device))
|
||||
|
||||
else:
|
||||
|
||||
membership_form = forms.DeviceVCMembershipForm(data=request.POST)
|
||||
|
||||
return render(request, 'dcim/virtualchassis_add_member.html', {
|
||||
@@ -3866,7 +3888,6 @@ class VirtualChassisRemoveMemberView(ObjectPermissionRequiredMixin, GetReturnURL
|
||||
return 'dcim.change_device'
|
||||
|
||||
def get(self, request, pk):
|
||||
|
||||
device = get_object_or_404(self.queryset, pk=pk, virtual_chassis__isnull=False)
|
||||
form = ConfirmationForm(initial=request.GET)
|
||||
|
||||
@@ -3877,7 +3898,6 @@ class VirtualChassisRemoveMemberView(ObjectPermissionRequiredMixin, GetReturnURL
|
||||
})
|
||||
|
||||
def post(self, request, pk):
|
||||
|
||||
device = get_object_or_404(self.queryset, pk=pk, virtual_chassis__isnull=False)
|
||||
form = ConfirmationForm(request.POST)
|
||||
|
||||
@@ -3891,13 +3911,11 @@ class VirtualChassisRemoveMemberView(ObjectPermissionRequiredMixin, GetReturnURL
|
||||
return redirect(device.get_absolute_url())
|
||||
|
||||
if form.is_valid():
|
||||
|
||||
devices = Device.objects.filter(pk=device.pk)
|
||||
for device in devices:
|
||||
device.virtual_chassis = None
|
||||
device.vc_position = None
|
||||
device.vc_priority = None
|
||||
device.save()
|
||||
device.snapshot()
|
||||
device.virtual_chassis = None
|
||||
device.vc_position = None
|
||||
device.vc_priority = None
|
||||
device.save()
|
||||
|
||||
msg = _('Removed {device} from virtual chassis {chassis}').format(
|
||||
device=device,
|
||||
@@ -3931,6 +3949,7 @@ class VirtualChassisBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(VirtualChassis, 'bulk_rename', path='rename', detail=False)
|
||||
class VirtualChassisBulkRenameView(generic.BulkRenameView):
|
||||
queryset = VirtualChassis.objects.all()
|
||||
filterset = filtersets.VirtualChassisFilterSet
|
||||
|
||||
|
||||
@register_model_view(VirtualChassis, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -3993,6 +4012,7 @@ class PowerPanelBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(PowerPanel, 'bulk_rename', path='rename', detail=False)
|
||||
class PowerPanelBulkRenameView(generic.BulkRenameView):
|
||||
queryset = PowerPanel.objects.all()
|
||||
filterset = filtersets.PowerPanelFilterSet
|
||||
|
||||
|
||||
@register_model_view(PowerPanel, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -4050,6 +4070,7 @@ class PowerFeedBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(PowerFeed, 'bulk_rename', path='rename', detail=False)
|
||||
class PowerFeedBulkRenameView(generic.BulkRenameView):
|
||||
queryset = PowerFeed.objects.all()
|
||||
filterset = filtersets.PowerFeedFilterSet
|
||||
|
||||
|
||||
@register_model_view(PowerFeed, 'bulk_disconnect', path='disconnect', detail=False)
|
||||
@@ -4128,6 +4149,7 @@ class VirtualDeviceContextBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(VirtualDeviceContext, 'bulk_rename', path='rename', detail=False)
|
||||
class VirtualDeviceContextBulkRenameView(generic.BulkRenameView):
|
||||
queryset = VirtualDeviceContext.objects.all()
|
||||
filterset = filtersets.VirtualDeviceContextFilterSet
|
||||
|
||||
|
||||
@register_model_view(VirtualDeviceContext, 'bulk_delete', path='delete', detail=False)
|
||||
|
||||
@@ -23,6 +23,6 @@ class ConfigTemplateSerializer(ChangeLogMessageSerializer, TaggableModelSerializ
|
||||
fields = [
|
||||
'id', 'url', 'display_url', 'display', 'name', 'description', 'environment_params', 'template_code',
|
||||
'mime_type', 'file_name', 'file_extension', 'as_attachment', 'data_source', 'data_path', 'data_file',
|
||||
'data_synced', 'tags', 'created', 'last_updated',
|
||||
'auto_sync_enabled', 'data_synced', 'tags', 'created', 'last_updated',
|
||||
]
|
||||
brief_fields = ('id', 'url', 'display', 'name', 'description')
|
||||
|
||||
@@ -5,6 +5,7 @@ from rest_framework import serializers
from core.api.serializers_.jobs import JobSerializer
from extras.models import Script
from netbox.api.serializers import ValidatedModelSerializer
from utilities.datetime import local_now

__all__ = (
    'ScriptDetailSerializer',
@@ -66,11 +67,31 @@ class ScriptInputSerializer(serializers.Serializer):
    interval = serializers.IntegerField(required=False, allow_null=True)

    def validate_schedule_at(self, value):
        if value and not self.context['script'].python_class.scheduling_enabled:
            raise serializers.ValidationError(_("Scheduling is not enabled for this script."))
        """
        Validates the specified schedule time for a script execution.
        """
        if value:
            if not self.context['script'].python_class.scheduling_enabled:
                raise serializers.ValidationError(_('Scheduling is not enabled for this script.'))
            if value < local_now():
                raise serializers.ValidationError(_('Scheduled time must be in the future.'))
        return value

    def validate_interval(self, value):
        """
        Validates the provided interval based on the script's scheduling configuration.
        """
        if value and not self.context['script'].python_class.scheduling_enabled:
            raise serializers.ValidationError(_("Scheduling is not enabled for this script."))
            raise serializers.ValidationError(_('Scheduling is not enabled for this script.'))
        return value

    def validate(self, data):
        """
        Validates the given data and ensures the necessary fields are populated.
        """
        # Set the schedule_at time to now if only an interval is provided
        # while handling the case where schedule_at is null.
        if data.get('interval') and not data.get('schedule_at'):
            data['schedule_at'] = local_now()

        return super().validate(data)

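Illustrative aside (not part of the diff): a minimal client-side sketch of the schedule_at/interval rules the ScriptInputSerializer changes above enforce. The host, token, and script ID below are assumptions made up for this example.

import datetime

import requests

NETBOX_URL = 'https://netbox.example.com'   # assumed NetBox host
TOKEN = '0123456789abcdef0123456789abcdef'  # assumed API token
SCRIPT_ID = 1                               # assumed Script primary key

# schedule_at must lie in the future and is only honoured when the script's
# Meta.scheduling_enabled is True; passing interval without schedule_at makes
# the serializer default schedule_at to the current time.
payload = {
    'data': {'var1': 'hello', 'var2': 1, 'var3': False},
    'commit': True,
    'schedule_at': (datetime.datetime.now().astimezone() + datetime.timedelta(minutes=10)).isoformat(),
    'interval': 60,  # re-run every 60 minutes
}
response = requests.post(
    f'{NETBOX_URL}/api/extras/scripts/{SCRIPT_ID}/',
    json=payload,
    headers={'Authorization': f'Token {TOKEN}'},
    timeout=30,
)
print(response.status_code, response.json())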
@@ -267,6 +267,14 @@ class ScriptViewSet(ModelViewSet):
    _ignore_model_permissions = True
    lookup_value_regex = '[^/]+' # Allow dots

    def initial(self, request, *args, **kwargs):
        super().initial(request, *args, **kwargs)

        # Restrict the view's QuerySet to allow only the permitted objects
        if request.user.is_authenticated:
            action = 'run' if request.method == 'POST' else 'view'
            self.queryset = self.queryset.restrict(request.user, action)

    def _get_script(self, pk):
        # If pk is numeric, retrieve script by ID
        if pk.isnumeric():
@@ -290,10 +298,12 @@ class ScriptViewSet(ModelViewSet):
        """
        Run a Script identified by its numeric PK or module & name and return the pending Job as the result
        """
        if not request.user.has_perm('extras.run_script'):
            raise PermissionDenied("This user does not have permission to run scripts.")

        script = self._get_script(pk)

        if not request.user.has_perm('extras.run_script', obj=script):
            raise PermissionDenied("This user does not have permission to run this script.")

        input_serializer = serializers.ScriptInputSerializer(
            data=request.data,
            context={'script': script}

@@ -209,7 +209,10 @@ class ObjectCountsWidget(DashboardWidget):
                url = get_action_url(model, action='list')
            except NoReverseMatch:
                url = None
            qs = model.objects.restrict(request.user, 'view')
            try:
                qs = model.objects.restrict(request.user, 'view')
            except AttributeError:
                qs = model.objects.all()
            # Apply any specified filters
            if url and (filters := self.config.get('filters')):
                params = dict_to_querydict(filters)

@@ -134,11 +134,18 @@ def process_event_rules(event_rules, object_type, event_type, data, username=Non

        # Enqueue a Job to record the script's execution
        from extras.jobs import ScriptJob
        params = {
            "instance": event_rule.action_object,
            "name": script.name,
            "user": user,
            "data": event_data
        }
        if snapshots:
            params["snapshots"] = snapshots
        if request:
            params["request"] = copy_safe_request(request)
        ScriptJob.enqueue(
            instance=event_rule.action_object,
            name=script.name,
            user=user,
            data=event_data
            **params
        )

        # Notification groups

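Illustrative aside (not part of the diff): the refactor above collects keyword arguments in a dict and unpacks it, so optional values such as snapshots or request are simply omitted rather than passed as None; a generic sketch of the same pattern.

def enqueue(callback, *, name, user, data, snapshots=None, request=None):
    # Build only the kwargs that are actually set, then forward them with **.
    params = {'name': name, 'user': user, 'data': data}
    if snapshots:
        params['snapshots'] = snapshots
    if request:
        params['request'] = request
    return callback(**params)


# Example call: 'snapshots' and 'request' are absent from the forwarded kwargs.
enqueue(lambda **kwargs: print(sorted(kwargs)), name='script', user='admin', data={'x': 1})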
@@ -398,8 +398,12 @@ class ConfigTemplateBulkEditForm(ChangelogMessageMixin, BulkEditForm):
        required=False,
        widget=BulkEditNullBooleanSelect()
    )

    nullable_fields = ('description', 'mime_type', 'file_name', 'file_extension')
    auto_sync_enabled = forms.NullBooleanField(
        label=_('Auto sync enabled'),
        required=False,
        widget=BulkEditNullBooleanSelect()
    )
    nullable_fields = ('description', 'mime_type', 'file_name', 'file_extension', 'auto_sync_enabled',)


class ImageAttachmentBulkEditForm(ChangelogMessageMixin, BulkEditForm):

@@ -5,7 +5,7 @@ from django.contrib.postgres.forms import SimpleArrayField
from django.core.exceptions import ObjectDoesNotExist
from django.utils.translation import gettext_lazy as _

from core.models import ObjectType
from core.models import DataFile, DataSource, ObjectType
from extras.choices import *
from extras.models import *
from netbox.events import get_event_type_choices
@@ -160,14 +160,41 @@ class ConfigContextProfileImportForm(NetBoxModelImportForm):


class ConfigTemplateImportForm(CSVModelForm):
    data_source = CSVModelChoiceField(
        label=_('Data source'),
        queryset=DataSource.objects.all(),
        required=False,
        to_field_name='name',
        help_text=_('Data source which provides the data file')
    )
    data_file = CSVModelChoiceField(
        label=_('Data file'),
        queryset=DataFile.objects.all(),
        required=False,
        to_field_name='path',
        help_text=_('Data file containing the template code')
    )
    auto_sync_enabled = forms.BooleanField(
        required=False,
        label=_('Auto sync enabled'),
        help_text=_("Enable automatic synchronization of template content when the data file is updated")
    )

    class Meta:
        model = ConfigTemplate
        fields = (
            'name', 'description', 'template_code', 'environment_params', 'mime_type', 'file_name', 'file_extension',
            'as_attachment', 'tags',
            'name', 'description', 'template_code', 'data_source', 'data_file', 'auto_sync_enabled',
            'environment_params', 'mime_type', 'file_name', 'file_extension', 'as_attachment', 'tags',
        )

    def clean(self):
        super().clean()

        # Make sure template_code is None when it's not included in the uploaded data
        if not self.data.get('template_code') and not self.data.get('data_file'):
            raise forms.ValidationError(_("Must specify either local content or a data file"))
        return self.cleaned_data['template_code']


class SavedFilterImportForm(CSVModelForm):
    object_types = CSVMultipleContentTypeField(
@@ -272,6 +299,10 @@ class JournalEntryImportForm(NetBoxModelImportForm):
        choices=JournalEntryKindChoices,
        help_text=_('The classification of entry')
    )
    comments = forms.CharField(
        label=_('Comments'),
        required=True
    )

    class Meta:
        model = JournalEntry

@@ -42,17 +42,20 @@ class CustomFieldFilterForm(SavedFiltersMixin, FilterForm):
    model = CustomField
    fieldsets = (
        FieldSet('q', 'filter_id'),
        FieldSet(
            'type', 'related_object_type_id', 'group_name', 'weight', 'required', 'unique', 'choice_set_id',
            name=_('Attributes')
        ),
        FieldSet('object_type_id', 'type', 'group_name', 'weight', 'required', 'unique', name=_('Attributes')),
        FieldSet('choice_set_id', 'related_object_type_id', name=_('Type Options')),
        FieldSet('ui_visible', 'ui_editable', 'is_cloneable', name=_('Behavior')),
        FieldSet('validation_minimum', 'validation_maximum', 'validation_regex', name=_('Validation')),
    )
    related_object_type_id = ContentTypeMultipleChoiceField(
    object_type_id = ContentTypeMultipleChoiceField(
        queryset=ObjectType.objects.with_feature('custom_fields'),
        required=False,
        label=_('Related object type')
        label=_('Object types'),
    )
    related_object_type_id = ContentTypeMultipleChoiceField(
        queryset=ObjectType.objects.public(),
        required=False,
        label=_('Related object type'),
    )
    type = forms.MultipleChoiceField(
        choices=CustomFieldTypeChoices,
@@ -136,12 +139,12 @@ class CustomLinkFilterForm(SavedFiltersMixin, FilterForm):
    model = CustomLink
    fieldsets = (
        FieldSet('q', 'filter_id'),
        FieldSet('object_type', 'enabled', 'new_window', 'weight', name=_('Attributes')),
        FieldSet('object_type_id', 'enabled', 'new_window', 'weight', name=_('Attributes')),
    )
    object_type = ContentTypeMultipleChoiceField(
    object_type_id = ContentTypeMultipleChoiceField(
        label=_('Object types'),
        queryset=ObjectType.objects.with_feature('custom_links'),
        required=False
        required=False,
    )
    enabled = forms.NullBooleanField(
        label=_('Enabled'),
@@ -230,12 +233,12 @@ class SavedFilterFilterForm(SavedFiltersMixin, FilterForm):
    model = SavedFilter
    fieldsets = (
        FieldSet('q', 'filter_id'),
        FieldSet('object_type', 'enabled', 'shared', 'weight', name=_('Attributes')),
        FieldSet('object_type_id', 'enabled', 'shared', 'weight', name=_('Attributes')),
    )
    object_type = ContentTypeMultipleChoiceField(
    object_type_id = ContentTypeMultipleChoiceField(
        label=_('Object types'),
        queryset=ObjectType.objects.public(),
        required=False
        required=False,
    )
    enabled = forms.NullBooleanField(
        label=_('Enabled'),
@@ -476,7 +479,7 @@ class ConfigTemplateFilterForm(SavedFiltersMixin, FilterForm):
    model = ConfigTemplate
    fieldsets = (
        FieldSet('q', 'filter_id', 'tag'),
        FieldSet('data_source_id', 'data_file_id', name=_('Data')),
        FieldSet('data_source_id', 'data_file_id', 'auto_sync_enabled', name=_('Data')),
        FieldSet('mime_type', 'file_name', 'file_extension', 'as_attachment', name=_('Rendering'))
    )
    data_source_id = DynamicModelMultipleChoiceField(
@@ -492,6 +495,13 @@ class ConfigTemplateFilterForm(SavedFiltersMixin, FilterForm):
            'source_id': '$data_source_id'
        }
    )
    auto_sync_enabled = forms.NullBooleanField(
        label=_('Auto sync enabled'),
        required=False,
        widget=forms.Select(
            choices=BOOLEAN_WITH_BLANK_CHOICES
        )
    )
    tag = TagFilterField(ConfigTemplate)
    mime_type = forms.CharField(
        required=False,

@@ -793,7 +793,7 @@ class JournalEntryForm(NetBoxModelForm):
        label=_('Kind'),
        choices=JournalEntryKindChoices
    )
    comments = CommentField()
    comments = CommentField(required=True)

    class Meta:
        model = JournalEntry

@@ -535,6 +535,15 @@ class CustomField(CloningMixin, ExportTemplatesMixin, ChangeLoggedModel):
        # URL
        elif self.type == CustomFieldTypeChoices.TYPE_URL:
            field = LaxURLField(assume_scheme='https', required=required, initial=initial)
            if self.validation_regex:
                field.validators = [
                    RegexValidator(
                        regex=self.validation_regex,
                        message=mark_safe(_("Values must match this regex: <code>{regex}</code>").format(
                            regex=escape(self.validation_regex)
                        ))
                    )
                ]

        # JSON
        elif self.type == CustomFieldTypeChoices.TYPE_JSON:
@@ -684,6 +693,13 @@ class CustomField(CloningMixin, ExportTemplatesMixin, ChangeLoggedModel):
            if self.validation_regex and not re.match(self.validation_regex, value):
                raise ValidationError(_("Value must match regex '{regex}'").format(regex=self.validation_regex))

            # Validate URL field
            elif self.type == CustomFieldTypeChoices.TYPE_URL:
                if type(value) is not str:
                    raise ValidationError(_("Value must be a string."))
                if self.validation_regex and not re.match(self.validation_regex, value):
                    raise ValidationError(_("Value must match regex '{regex}'").format(regex=self.validation_regex))

            # Validate integer
            elif self.type == CustomFieldTypeChoices.TYPE_INTEGER:
                if type(value) is not int:

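Illustrative aside (not part of the diff): the change above applies validation_regex to URL custom fields; a standalone sketch of the same RegexValidator behaviour on a plain Django form field, using an example ^https:// regex rather than anything taken from NetBox.

import django
from django.conf import settings

settings.configure()  # minimal standalone settings so the form can be validated outside a project
django.setup()

from django import forms
from django.core.validators import RegexValidator


class ExampleForm(forms.Form):
    url = forms.URLField(
        validators=[RegexValidator(regex=r'^https://', message="Value must match regex '^https://'")],
    )


print(ExampleForm(data={'url': 'http://example.com'}).is_valid())   # False: the regex requires https
print(ExampleForm(data={'url': 'https://example.com'}).is_valid())  # True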
@@ -30,8 +30,7 @@ class CustomStoragesLoader(importlib.abc.Loader):
        return None # Use default module creation

    def exec_module(self, module):
        storage = storages.create_storage(storages.backends["scripts"])
        with storage.open(self.filename, 'rb') as f:
        with storages["scripts"].open(self.filename, 'rb') as f:
            code = f.read()
            exec(code, module.__dict__)


@@ -126,7 +126,7 @@ class ScriptModule(PythonModuleMixin, JobsMixin, ManagedFile):
        ordered.extend(script_objects.values())
        return ordered

    @property
    @cached_property
    def module_scripts(self):

    def _get_name(cls):

@@ -632,6 +632,10 @@ class ConfigTemplateTable(NetBoxTable):
        orderable=False,
        verbose_name=_('Synced')
    )
    auto_sync_enabled = columns.BooleanColumn(
        verbose_name=_('Auto Sync Enabled'),
        orderable=False,
    )
    mime_type = tables.Column(
        verbose_name=_('MIME Type')
    )

@@ -1,4 +1,6 @@
from django import template
from django.utils.safestring import mark_safe
from django.utils.translation import gettext as _


register = template.Library()
@@ -8,4 +10,16 @@ register = template.Library()
def render_widget(context, widget):
    request = context['request']

    return widget.render(request)
    try:
        return widget.render(request)
    except Exception as e:
        message1 = _('An error was encountered when attempting to render this widget:')
        message2 = _('Please try reconfiguring the widget, or remove it from your dashboard.')
        return mark_safe(f"""
            <p>
                <span class="text-danger"><i class="mdi mdi-alert"></i></span>
                {message1}
            </p>
            <p class="font-monospace ps-3">{e}</p>
            <p>{message2}</p>
        """)

@@ -3,6 +3,7 @@ import datetime
from django.contrib.contenttypes.models import ContentType
from django.urls import reverse
from django.utils.timezone import make_aware, now
from rest_framework import status

from core.choices import ManagedFileRootPathChoices
from core.events import *
@@ -858,16 +859,16 @@ class ConfigTemplateTest(APIViewTestCases.APIViewTestCase):
class ScriptTest(APITestCase):

    class TestScriptClass(PythonClass):

        class Meta:
            name = "Test script"
            name = 'Test script'
            commit = True
            scheduling_enabled = True

        var1 = StringVar()
        var2 = IntegerVar()
        var3 = BooleanVar()

        def run(self, data, commit=True):

            self.log_info(data['var1'])
            self.log_success(data['var2'])
            self.log_failure(data['var3'])
@@ -878,37 +879,104 @@ class ScriptTest(APITestCase):
    def setUpTestData(cls):
        module = ScriptModule.objects.create(
            file_root=ManagedFileRootPathChoices.SCRIPTS,
            file_path='/var/tmp/script.py'
            file_path='script.py',
        )
        Script.objects.create(
        script = Script.objects.create(
            module=module,
            name="Test script",
            name='Test script',
            is_executable=True,
        )
        cls.url = reverse('extras-api:script-detail', kwargs={'pk': script.pk})

    @property
    def python_class(self):
        return self.TestScriptClass

    def setUp(self):
        super().setUp()
        self.add_permissions('extras.view_script')

        # Monkey-patch the Script model to return our TestScriptClass above
        Script.python_class = self.python_class

    def test_get_script(self):
        module = ScriptModule.objects.get(
            file_root=ManagedFileRootPathChoices.SCRIPTS,
            file_path='/var/tmp/script.py'
        )
        script = module.scripts.all().first()
        url = reverse('extras-api:script-detail', kwargs={'pk': script.pk})
        response = self.client.get(url, **self.header)
        response = self.client.get(self.url, **self.header)

        self.assertEqual(response.data['name'], self.TestScriptClass.Meta.name)
        self.assertEqual(response.data['vars']['var1'], 'StringVar')
        self.assertEqual(response.data['vars']['var2'], 'IntegerVar')
        self.assertEqual(response.data['vars']['var3'], 'BooleanVar')

    def test_schedule_script_past_time_rejected(self):
        """
        Scheduling with past schedule_at should fail.
        """
        self.add_permissions('extras.run_script')

        payload = {
            'data': {'var1': 'hello', 'var2': 1, 'var3': False},
            'commit': True,
            'schedule_at': now() - datetime.timedelta(hours=1),
        }
        response = self.client.post(self.url, payload, format='json', **self.header)

        self.assertHttpStatus(response, status.HTTP_400_BAD_REQUEST)
        self.assertIn('schedule_at', response.data)
        # Be tolerant of exact wording but ensure we failed on schedule_at being in the past
        self.assertIn('future', str(response.data['schedule_at']).lower())

    def test_schedule_script_interval_only(self):
        """
        Interval without schedule_at should auto-set schedule_at now.
        """
        self.add_permissions('extras.run_script')

        payload = {
            'data': {'var1': 'hello', 'var2': 1, 'var3': False},
            'commit': True,
            'interval': 60,
        }
        response = self.client.post(self.url, payload, format='json', **self.header)

        self.assertHttpStatus(response, status.HTTP_200_OK)
        # The latest job is returned in the script detail serializer under "result"
        self.assertIn('result', response.data)
        self.assertEqual(response.data['result']['interval'], 60)
        # Ensure a start time was autopopulated
        self.assertIsNotNone(response.data['result']['scheduled'])

    def test_schedule_script_when_disabled(self):
        """
        Scheduling should fail when script.scheduling_enabled=False.
        """
        self.add_permissions('extras.run_script')

        # Temporarily disable scheduling on the in-test Python class
        original = getattr(self.TestScriptClass.Meta, 'scheduling_enabled', True)
        self.TestScriptClass.Meta.scheduling_enabled = False
        base = {
            'data': {'var1': 'hello', 'var2': 1, 'var3': False},
            'commit': True,
        }
        # Check both schedule_at and interval paths
        cases = [
            {**base, 'schedule_at': now() + datetime.timedelta(minutes=5)},
            {**base, 'interval': 60},
        ]
        try:
            for case in cases:
                with self.subTest(case=list(case.keys())):
                    response = self.client.post(self.url, case, format='json', **self.header)

                    self.assertHttpStatus(response, status.HTTP_400_BAD_REQUEST)
                    # Error should be attached to whichever field we used
                    key = 'schedule_at' if 'schedule_at' in case else 'interval'
                    self.assertIn(key, response.data)
                    self.assertIn('scheduling is not enabled', str(response.data[key]).lower())
        finally:
            # Restore the original setting for other tests
            self.TestScriptClass.Meta.scheduling_enabled = original


class CreatedUpdatedFilterTest(APITestCase):


@@ -1300,6 +1300,28 @@ class CustomFieldAPITest(APITestCase):
|
||||
response = self.client.patch(url, data, format='json', **self.header)
|
||||
self.assertHttpStatus(response, status.HTTP_200_OK)
|
||||
|
||||
def test_url_regex_validation(self):
|
||||
"""
|
||||
Test that validation_regex is applied to URL custom fields (fixes #20498).
|
||||
"""
|
||||
site2 = Site.objects.get(name='Site 2')
|
||||
url = reverse('dcim-api:site-detail', kwargs={'pk': site2.pk})
|
||||
self.add_permissions('dcim.change_site')
|
||||
|
||||
cf_url = CustomField.objects.get(name='url_field')
|
||||
cf_url.validation_regex = r'^https://' # Require HTTPS
|
||||
cf_url.save()
|
||||
|
||||
# Test invalid URL (http instead of https)
|
||||
data = {'custom_fields': {'url_field': 'http://example.com'}}
|
||||
response = self.client.patch(url, data, format='json', **self.header)
|
||||
self.assertHttpStatus(response, status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
# Test valid URL (https)
|
||||
data = {'custom_fields': {'url_field': 'https://example.com'}}
|
||||
response = self.client.patch(url, data, format='json', **self.header)
|
||||
self.assertHttpStatus(response, status.HTTP_200_OK)
|
||||
|
||||
def test_uniqueness_validation(self):
|
||||
# Create a unique custom field
|
||||
cf_text = CustomField.objects.get(name='text_field')
|
||||
|
||||
@@ -1,11 +1,14 @@
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.urls import reverse
|
||||
from django.test import tag
|
||||
|
||||
from core.choices import ManagedFileRootPathChoices
|
||||
from core.events import *
|
||||
from core.models import ObjectType
|
||||
from dcim.models import DeviceType, Manufacturer, Site
|
||||
from extras.choices import *
|
||||
from extras.models import *
|
||||
from extras.scripts import Script as PythonClass, IntegerVar, BooleanVar
|
||||
from users.models import Group, User
|
||||
from utilities.testing import ViewTestCases, TestCase
|
||||
|
||||
@@ -897,3 +900,70 @@ class ScriptListViewTest(TestCase):
|
||||
response = self.client.get(url, {'embedded': 'true'})
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertTemplateUsed(response, 'extras/inc/script_list_content.html')
|
||||
|
||||
|
||||
class ScriptValidationErrorTest(TestCase):
|
||||
user_permissions = ['extras.view_script', 'extras.run_script']
|
||||
|
||||
class TestScriptMixin:
|
||||
bar = IntegerVar(min_value=0, max_value=30, default=30)
|
||||
|
||||
class TestScriptClass(TestScriptMixin, PythonClass):
|
||||
class Meta:
|
||||
name = 'Test script'
|
||||
commit_default = False
|
||||
fieldsets = (("Logging", ("debug_mode",)),)
|
||||
|
||||
debug_mode = BooleanVar(default=False)
|
||||
|
||||
def run(self, data, commit):
|
||||
return "Complete"
|
||||
|
||||
@classmethod
|
||||
def setUpTestData(cls):
|
||||
module = ScriptModule.objects.create(file_root=ManagedFileRootPathChoices.SCRIPTS, file_path='test_script.py')
|
||||
cls.script = Script.objects.create(module=module, name='Test script', is_executable=True)
|
||||
|
||||
def setUp(self):
|
||||
super().setUp()
|
||||
Script.python_class = property(lambda self: ScriptValidationErrorTest.TestScriptClass)
|
||||
|
||||
@tag('regression')
|
||||
def test_script_validation_error_displays_message(self):
|
||||
from unittest.mock import patch
|
||||
|
||||
url = reverse('extras:script', kwargs={'pk': self.script.pk})
|
||||
|
||||
with patch('extras.views.get_workers_for_queue', return_value=['worker']):
|
||||
response = self.client.post(url, {'debug_mode': 'true', '_commit': 'true'})
|
||||
|
||||
self.assertEqual(response.status_code, 200)
|
||||
messages = list(response.context['messages'])
|
||||
self.assertEqual(len(messages), 1)
|
||||
self.assertEqual(str(messages[0]), "bar: This field is required.")
|
||||
|
||||
@tag('regression')
|
||||
def test_script_validation_error_no_toast_for_fieldset_fields(self):
|
||||
from unittest.mock import patch, PropertyMock
|
||||
|
||||
class FieldsetScript(PythonClass):
|
||||
class Meta:
|
||||
name = 'Fieldset test'
|
||||
commit_default = False
|
||||
fieldsets = (("Fields", ("required_field",)),)
|
||||
|
||||
required_field = IntegerVar(min_value=10)
|
||||
|
||||
def run(self, data, commit):
|
||||
return "Complete"
|
||||
|
||||
url = reverse('extras:script', kwargs={'pk': self.script.pk})
|
||||
|
||||
with patch.object(Script, 'python_class', new_callable=PropertyMock) as mock_python_class:
|
||||
mock_python_class.return_value = FieldsetScript
|
||||
with patch('extras.views.get_workers_for_queue', return_value=['worker']):
|
||||
response = self.client.post(url, {'required_field': '5', '_commit': 'true'})
|
||||
|
||||
self.assertEqual(response.status_code, 200)
|
||||
messages = list(response.context['messages'])
|
||||
self.assertEqual(len(messages), 0)
|
||||
|
||||
@@ -4,7 +4,7 @@ from django.contrib.auth.mixins import LoginRequiredMixin
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.core.paginator import EmptyPage
|
||||
from django.db.models import Count, Q
|
||||
from django.http import HttpResponseBadRequest, HttpResponseForbidden, HttpResponse
|
||||
from django.http import HttpResponseBadRequest, HttpResponseForbidden, HttpResponse, Http404
|
||||
from django.shortcuts import get_object_or_404, redirect, render
|
||||
from django.urls import reverse
|
||||
from django.utils import timezone
|
||||
@@ -25,7 +25,7 @@ from netbox.object_actions import *
|
||||
from netbox.views import generic
|
||||
from netbox.views.generic.mixins import TableMixin
|
||||
from utilities.forms import ConfirmationForm, get_field_value
|
||||
from utilities.htmx import htmx_partial
|
||||
from utilities.htmx import htmx_partial, htmx_maybe_redirect_current_page
|
||||
from utilities.paginator import EnhancedPaginator, get_paginate_count
|
||||
from utilities.query import count_related
|
||||
from utilities.querydict import normalize_querydict
|
||||
@@ -101,6 +101,7 @@ class CustomFieldBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(CustomField, 'bulk_rename', path='rename', detail=False)
|
||||
class CustomFieldBulkRenameView(generic.BulkRenameView):
|
||||
queryset = CustomField.objects.all()
|
||||
filterset = filtersets.CustomFieldFilterSet
|
||||
|
||||
|
||||
@register_model_view(CustomField, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -175,6 +176,7 @@ class CustomFieldChoiceSetBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(CustomFieldChoiceSet, 'bulk_rename', path='rename', detail=False)
|
||||
class CustomFieldChoiceSetBulkRenameView(generic.BulkRenameView):
|
||||
queryset = CustomFieldChoiceSet.objects.all()
|
||||
filterset = filtersets.CustomFieldChoiceSetFilterSet
|
||||
|
||||
|
||||
@register_model_view(CustomFieldChoiceSet, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -230,6 +232,7 @@ class CustomLinkBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(CustomLink, 'bulk_rename', path='rename', detail=False)
|
||||
class CustomLinkBulkRenameView(generic.BulkRenameView):
|
||||
queryset = CustomLink.objects.all()
|
||||
filterset = filtersets.CustomLinkFilterSet
|
||||
|
||||
|
||||
@register_model_view(CustomLink, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -286,6 +289,7 @@ class ExportTemplateBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(ExportTemplate, 'bulk_rename', path='rename', detail=False)
|
||||
class ExportTemplateBulkRenameView(generic.BulkRenameView):
|
||||
queryset = ExportTemplate.objects.all()
|
||||
filterset = filtersets.ExportTemplateFilterSet
|
||||
|
||||
|
||||
@register_model_view(ExportTemplate, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -351,6 +355,7 @@ class SavedFilterBulkEditView(SharedObjectViewMixin, generic.BulkEditView):
|
||||
@register_model_view(SavedFilter, 'bulk_rename', path='rename', detail=False)
|
||||
class SavedFilterBulkRenameView(generic.BulkRenameView):
|
||||
queryset = SavedFilter.objects.all()
|
||||
filterset = filtersets.SavedFilterFilterSet
|
||||
|
||||
|
||||
@register_model_view(SavedFilter, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -413,6 +418,7 @@ class TableConfigBulkEditView(SharedObjectViewMixin, generic.BulkEditView):
|
||||
@register_model_view(TableConfig, 'bulk_rename', path='rename', detail=False)
|
||||
class TableConfigBulkRenameView(generic.BulkRenameView):
|
||||
queryset = TableConfig.objects.all()
|
||||
filterset = filtersets.TableConfigFilterSet
|
||||
|
||||
|
||||
@register_model_view(TableConfig, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -499,6 +505,7 @@ class NotificationGroupBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(NotificationGroup, 'bulk_rename', path='rename', detail=False)
|
||||
class NotificationGroupBulkRenameView(generic.BulkRenameView):
|
||||
queryset = NotificationGroup.objects.all()
|
||||
filterset = filtersets.NotificationGroupFilterSet
|
||||
|
||||
|
||||
@register_model_view(NotificationGroup, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -518,8 +525,9 @@ class NotificationsView(LoginRequiredMixin, View):
|
||||
"""
|
||||
def get(self, request):
|
||||
return render(request, 'htmx/notifications.html', {
|
||||
'notifications': request.user.notifications.unread(),
|
||||
'notifications': request.user.notifications.unread()[:10],
|
||||
'total_count': request.user.notifications.count(),
|
||||
'unread_count': request.user.notifications.unread().count(),
|
||||
})
|
||||
|
||||
|
||||
@@ -528,6 +536,7 @@ class NotificationReadView(LoginRequiredMixin, View):
|
||||
"""
|
||||
Mark the Notification read and redirect the user to its attached object.
|
||||
"""
|
||||
|
||||
def get(self, request, pk):
|
||||
# Mark the Notification as read
|
||||
notification = get_object_or_404(request.user.notifications, pk=pk)
|
||||
@@ -541,18 +550,48 @@ class NotificationReadView(LoginRequiredMixin, View):
|
||||
return redirect('account:notifications')
|
||||
|
||||
|
||||
@register_model_view(Notification, name='dismiss_all', path='dismiss-all', detail=False)
|
||||
class NotificationDismissAllView(LoginRequiredMixin, View):
|
||||
"""
|
||||
Convenience view to clear all *unread* notifications for the current user.
|
||||
"""
|
||||
|
||||
def get(self, request):
|
||||
request.user.notifications.unread().delete()
|
||||
if htmx_partial(request):
|
||||
# If a user is currently on the notification page, redirect there (full repaint)
|
||||
redirect_resp = htmx_maybe_redirect_current_page(request, 'account:notifications', preserve_query=True)
|
||||
if redirect_resp:
|
||||
return redirect_resp
|
||||
|
||||
return render(request, 'htmx/notifications.html', {
|
||||
'notifications': request.user.notifications.unread()[:10],
|
||||
'total_count': request.user.notifications.count(),
|
||||
'unread_count': request.user.notifications.unread().count(),
|
||||
})
|
||||
return redirect('account:notifications')
|
||||
|
||||
|
||||
@register_model_view(Notification, 'dismiss')
|
||||
class NotificationDismissView(LoginRequiredMixin, View):
|
||||
"""
|
||||
A convenience view which allows deleting notifications with one click.
|
||||
"""
|
||||
|
||||
def get(self, request, pk):
|
||||
notification = get_object_or_404(request.user.notifications, pk=pk)
|
||||
notification.delete()
|
||||
|
||||
if htmx_partial(request):
|
||||
# If a user is currently on the notification page, redirect there (full repaint)
|
||||
redirect_resp = htmx_maybe_redirect_current_page(request, 'account:notifications', preserve_query=True)
|
||||
if redirect_resp:
|
||||
return redirect_resp
|
||||
|
||||
return render(request, 'htmx/notifications.html', {
|
||||
'notifications': request.user.notifications.unread()[:10],
|
||||
'total_count': request.user.notifications.count(),
|
||||
'unread_count': request.user.notifications.unread().count(),
|
||||
})
|
||||
|
||||
return redirect('account:notifications')
|
||||
@@ -650,6 +689,7 @@ class WebhookBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(Webhook, 'bulk_rename', path='rename', detail=False)
|
||||
class WebhookBulkRenameView(generic.BulkRenameView):
|
||||
queryset = Webhook.objects.all()
|
||||
filterset = filtersets.WebhookFilterSet
|
||||
|
||||
|
||||
@register_model_view(Webhook, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -705,6 +745,7 @@ class EventRuleBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(EventRule, 'bulk_rename', path='rename', detail=False)
|
||||
class EventRuleBulkRenameView(generic.BulkRenameView):
|
||||
queryset = EventRule.objects.all()
|
||||
filterset = filtersets.EventRuleFilterSet
|
||||
|
||||
|
||||
@register_model_view(EventRule, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -841,6 +882,7 @@ class ConfigContextProfileBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(ConfigContextProfile, 'bulk_rename', path='rename', detail=False)
|
||||
class ConfigContextProfileBulkRenameView(generic.BulkRenameView):
|
||||
queryset = ConfigContextProfile.objects.all()
|
||||
filterset = filtersets.ConfigContextProfileFilterSet
|
||||
|
||||
|
||||
@register_model_view(ConfigContextProfile, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -929,6 +971,7 @@ class ConfigContextBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(ConfigContext, 'bulk_rename', path='rename', detail=False)
|
||||
class ConfigContextBulkRenameView(generic.BulkRenameView):
|
||||
queryset = ConfigContext.objects.all()
|
||||
filterset = filtersets.ConfigContextFilterSet
|
||||
|
||||
|
||||
@register_model_view(ConfigContext, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -1020,6 +1063,7 @@ class ConfigTemplateBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(ConfigTemplate, 'bulk_rename', path='rename', detail=False)
|
||||
class ConfigTemplateBulkRenameView(generic.BulkRenameView):
|
||||
queryset = ConfigTemplate.objects.all()
|
||||
filterset = filtersets.ConfigTemplateFilterSet
|
||||
|
||||
|
||||
@register_model_view(ConfigTemplate, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -1143,6 +1187,7 @@ class ImageAttachmentBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(ImageAttachment, 'bulk_rename', path='rename', detail=False)
|
||||
class ImageAttachmentBulkRenameView(generic.BulkRenameView):
|
||||
queryset = ImageAttachment.objects.all()
|
||||
filterset = filtersets.ImageAttachmentFilterSet
|
||||
|
||||
|
||||
@register_model_view(ImageAttachment, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -1485,6 +1530,15 @@ class ScriptView(BaseScriptView):
|
||||
)
|
||||
|
||||
return redirect('extras:script_result', job_pk=job.pk)
|
||||
else:
|
||||
fieldset_fields = {field for _, fields in script_class.get_fieldsets() for field in fields}
|
||||
hidden_errors = {
|
||||
field: errors for field, errors in form.errors.items()
|
||||
if field not in fieldset_fields
|
||||
}
|
||||
if hidden_errors:
|
||||
error_msg = '; '.join(f"{field}: {', '.join(errors)}" for field, errors in hidden_errors.items())
|
||||
messages.error(request, error_msg)
|
||||
|
||||
return render(request, 'extras/script.html', {
|
||||
'object': script,
|
||||
|
||||
@@ -369,6 +369,20 @@ class IPAddressImportForm(NetBoxModelImportForm):
|
||||
**{f"virtual_machine__{self.fields['virtual_machine'].to_field_name}": data['virtual_machine']}
|
||||
)
|
||||
|
||||
def clean_is_primary(self):
|
||||
# Make sure is_primary is None when it's not included in the uploaded data
|
||||
if 'is_primary' not in self.data:
|
||||
return None
|
||||
else:
|
||||
return self.cleaned_data['is_primary']
|
||||
|
||||
def clean_is_oob(self):
|
||||
# Make sure is_oob is None when it's not included in the uploaded data
|
||||
if 'is_oob' not in self.data:
|
||||
return None
|
||||
else:
|
||||
return self.cleaned_data['is_oob']
|
||||
|
||||
def clean(self):
|
||||
super().clean()
|
||||
|
||||
@@ -411,18 +425,18 @@ class IPAddressImportForm(NetBoxModelImportForm):
|
||||
ipaddress = super().save(*args, **kwargs)
|
||||
|
||||
# Set as primary for device/VM
|
||||
if self.cleaned_data.get('is_primary'):
|
||||
if self.cleaned_data.get('is_primary') is not None:
|
||||
parent = self.cleaned_data.get('device') or self.cleaned_data.get('virtual_machine')
|
||||
if self.instance.address.version == 4:
|
||||
parent.primary_ip4 = ipaddress
|
||||
parent.primary_ip4 = ipaddress if self.cleaned_data.get('is_primary') else None
|
||||
elif self.instance.address.version == 6:
|
||||
parent.primary_ip6 = ipaddress
|
||||
parent.primary_ip6 = ipaddress if self.cleaned_data.get('is_primary') else None
|
||||
parent.save()
|
||||
|
||||
# Set as OOB for device
|
||||
if self.cleaned_data.get('is_oob'):
|
||||
if self.cleaned_data.get('is_oob') is not None:
|
||||
parent = self.cleaned_data.get('device')
|
||||
parent.oob_ip = ipaddress
|
||||
parent.oob_ip = ipaddress if self.cleaned_data.get('is_oob') else None
|
||||
parent.save()
|
||||
|
||||
return ipaddress
|
||||
|
||||
@@ -79,12 +79,36 @@ class ASNRangeFilter(TenancyFilterMixin, OrganizationalModelFilterMixin):
|
||||
|
||||
@strawberry_django.filter_type(models.Aggregate, lookups=True)
|
||||
class AggregateFilter(ContactFilterMixin, TenancyFilterMixin, PrimaryModelFilterMixin):
|
||||
prefix: Annotated['PrefixFilter', strawberry.lazy('ipam.graphql.filters')] | None = strawberry_django.filter_field()
|
||||
prefix_id: ID | None = strawberry_django.filter_field()
|
||||
prefix: FilterLookup[str] | None = strawberry_django.filter_field()
|
||||
rir: Annotated['RIRFilter', strawberry.lazy('ipam.graphql.filters')] | None = strawberry_django.filter_field()
|
||||
rir_id: ID | None = strawberry_django.filter_field()
|
||||
date_added: DateFilterLookup[date] | None = strawberry_django.filter_field()
|
||||
|
||||
@strawberry_django.filter_field()
|
||||
def contains(self, value: list[str], prefix) -> Q:
|
||||
"""
|
||||
Return aggregates whose `prefix` contains any of the supplied networks.
|
||||
Mirrors PrefixFilter.contains but operates on the Aggregate.prefix field itself.
|
||||
"""
|
||||
if not value:
|
||||
return Q()
|
||||
q = Q()
|
||||
for subnet in value:
|
||||
try:
|
||||
query = str(netaddr.IPNetwork(subnet.strip()).cidr)
|
||||
except (AddrFormatError, ValueError):
|
||||
continue
|
||||
q |= Q(**{f"{prefix}prefix__net_contains": query})
|
||||
return q
|
||||
|
||||
@strawberry_django.filter_field()
|
||||
def family(
|
||||
self,
|
||||
value: Annotated['IPAddressFamilyEnum', strawberry.lazy('ipam.graphql.enums')],
|
||||
prefix,
|
||||
) -> Q:
|
||||
return Q(**{f"{prefix}prefix__family": value.value})
|
||||
|
||||
|
||||
@strawberry_django.filter_type(models.FHRPGroup, lookups=True)
|
||||
class FHRPGroupFilter(PrimaryModelFilterMixin):
|
||||
@@ -119,28 +143,28 @@ class FHRPGroupAssignmentFilter(BaseObjectTypeFilterMixin, ChangeLogFilterMixin)
|
||||
)
|
||||
|
||||
@strawberry_django.filter_field()
|
||||
def device_id(self, queryset, value: list[str], prefix) -> Q:
|
||||
return self.filter_device('id', value)
|
||||
def device_id(self, value: list[str], prefix) -> Q:
|
||||
return self.filter_device('id', value, prefix)
|
||||
|
||||
@strawberry_django.filter_field()
|
||||
def device(self, value: list[str], prefix) -> Q:
|
||||
return self.filter_device('name', value)
|
||||
return self.filter_device('name', value, prefix)
|
||||
|
||||
@strawberry_django.filter_field()
|
||||
def virtual_machine_id(self, value: list[str], prefix) -> Q:
|
||||
return Q(interface_id__in=VMInterface.objects.filter(virtual_machine_id__in=value))
|
||||
return Q(**{f"{prefix}interface_id__in": VMInterface.objects.filter(virtual_machine_id__in=value)})
|
||||
|
||||
@strawberry_django.filter_field()
|
||||
def virtual_machine(self, value: list[str], prefix) -> Q:
|
||||
return Q(interface_id__in=VMInterface.objects.filter(virtual_machine__name__in=value))
|
||||
return Q(**{f"{prefix}interface_id__in": VMInterface.objects.filter(virtual_machine__name__in=value)})
|
||||
|
||||
def filter_device(self, field, value) -> Q:
|
||||
def filter_device(self, field, value, prefix) -> Q:
|
||||
"""Helper to standardize logic for device and device_id filters"""
|
||||
devices = Device.objects.filter(**{f'{field}__in': value})
|
||||
interface_ids = []
|
||||
for device in devices:
|
||||
interface_ids.extend(device.vc_interfaces().values_list('id', flat=True))
|
||||
return Q(interface_id__in=interface_ids)
|
||||
return Q(**{f"{prefix}interface_id__in": interface_ids})
|
||||
|
||||
|
||||
@strawberry_django.filter_type(models.IPAddress, lookups=True)
|
||||
@@ -170,7 +194,7 @@ class IPAddressFilter(ContactFilterMixin, TenancyFilterMixin, PrimaryModelFilter
|
||||
|
||||
@strawberry_django.filter_field()
|
||||
def assigned(self, value: bool, prefix) -> Q:
|
||||
return Q(assigned_object_id__isnull=(not value))
|
||||
return Q(**{f"{prefix}assigned_object_id__isnull": not value})
|
||||
|
||||
@strawberry_django.filter_field()
|
||||
def parent(self, value: list[str], prefix) -> Q:
|
||||
@@ -180,9 +204,9 @@ class IPAddressFilter(ContactFilterMixin, TenancyFilterMixin, PrimaryModelFilter
|
||||
for subnet in value:
|
||||
try:
|
||||
query = str(netaddr.IPNetwork(subnet.strip()).cidr)
|
||||
q |= Q(address__net_host_contained=query)
|
||||
except (AddrFormatError, ValueError):
|
||||
return Q()
|
||||
continue
|
||||
q |= Q(**{f"{prefix}address__net_host_contained": query})
|
||||
return q
|
||||
|
||||
@strawberry_django.filter_field()
|
||||
@@ -217,9 +241,14 @@ class IPRangeFilter(ContactFilterMixin, TenancyFilterMixin, PrimaryModelFilterMi
|
||||
for subnet in value:
|
||||
try:
|
||||
query = str(netaddr.IPNetwork(subnet.strip()).cidr)
|
||||
q |= Q(start_address__net_host_contained=query, end_address__net_host_contained=query)
|
||||
except (AddrFormatError, ValueError):
|
||||
return Q()
|
||||
continue
|
||||
q |= Q(
|
||||
**{
|
||||
f"{prefix}start_address__net_host_contained": query,
|
||||
f"{prefix}end_address__net_host_contained": query,
|
||||
}
|
||||
)
|
||||
return q
|
||||
|
||||
@strawberry_django.filter_field()
|
||||
@@ -228,10 +257,17 @@ class IPRangeFilter(ContactFilterMixin, TenancyFilterMixin, PrimaryModelFilterMi
|
||||
return Q()
|
||||
q = Q()
|
||||
for subnet in value:
|
||||
net = netaddr.IPNetwork(subnet.strip())
|
||||
try:
|
||||
net = netaddr.IPNetwork(subnet.strip())
|
||||
query_start = str(netaddr.IPAddress(net.first))
|
||||
query_end = str(netaddr.IPAddress(net.last))
|
||||
except (AddrFormatError, ValueError):
|
||||
continue
|
||||
q |= Q(
|
||||
start_address__host__inet__lte=str(netaddr.IPAddress(net.first)),
|
||||
end_address__host__inet__gte=str(netaddr.IPAddress(net.last)),
|
||||
**{
|
||||
f"{prefix}start_address__host__inet__lte": query_start,
|
||||
f"{prefix}end_address__host__inet__gte": query_end,
|
||||
}
|
||||
)
|
||||
return q
|
||||
|
||||
@@ -257,10 +293,21 @@ class PrefixFilter(ContactFilterMixin, ScopedFilterMixin, TenancyFilterMixin, Pr
|
||||
return Q()
|
||||
q = Q()
|
||||
for subnet in value:
|
||||
query = str(netaddr.IPNetwork(subnet.strip()).cidr)
|
||||
q |= Q(prefix__net_contains=query)
|
||||
try:
|
||||
query = str(netaddr.IPNetwork(subnet.strip()).cidr)
|
||||
except (AddrFormatError, ValueError):
|
||||
continue
|
||||
q |= Q(**{f"{prefix}prefix__net_contains": query})
|
||||
return q
|
||||
|
||||
@strawberry_django.filter_field()
|
||||
def family(
|
||||
self,
|
||||
value: Annotated['IPAddressFamilyEnum', strawberry.lazy('ipam.graphql.enums')],
|
||||
prefix,
|
||||
) -> Q:
|
||||
return Q(**{f"{prefix}prefix__family": value.value})
|
||||
|
||||
|
||||
@strawberry_django.filter_type(models.RIR, lookups=True)
|
||||
class RIRFilter(OrganizationalModelFilterMixin):
|
||||
|
||||
@@ -0,0 +1,27 @@
from django.db import migrations


def populate_vlangroup_total_vlan_ids(apps, schema_editor):
    VLANGroup = apps.get_model('ipam', 'VLANGroup')
    db_alias = schema_editor.connection.alias

    vlan_groups = VLANGroup.objects.using(db_alias).only('id', 'vid_ranges')
    for group in vlan_groups:
        total_vlan_ids = 0
        if group.vid_ranges:
            for r in group.vid_ranges:
                # Half-open [lo, hi): length is (hi - lo).
                if r is not None and r.lower is not None and r.upper is not None:
                    total_vlan_ids += r.upper - r.lower
        group._total_vlan_ids = total_vlan_ids
    VLANGroup.objects.using(db_alias).bulk_update(vlan_groups, ['_total_vlan_ids'], batch_size=100)


class Migration(migrations.Migration):
    dependencies = [
        ('ipam', '0082_add_prefix_network_containment_indexes'),
    ]

    operations = [
        migrations.RunPython(populate_vlangroup_total_vlan_ids, migrations.RunPython.noop),
    ]
@@ -132,7 +132,8 @@ class VLANGroup(OrganizationalModel):
    def save(self, *args, **kwargs):
        self._total_vlan_ids = 0
        for vid_range in self.vid_ranges:
            self._total_vlan_ids += vid_range.upper - vid_range.lower + 1
            # VID range is inclusive on lower-bound, exclusive on upper-bound
            self._total_vlan_ids += vid_range.upper - vid_range.lower

        super().save(*args, **kwargs)


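Illustrative aside (not part of the diff): the stored VID ranges are half-open ([lower, upper)), which is why both the migration and save() above count upper - lower rather than upper - lower + 1. The range values below are made up for the example.

# A group configured for VIDs 100-199 and 300-301 is stored as [100, 200) and [300, 302).
vid_ranges = [(100, 200), (300, 302)]
total_vlan_ids = sum(upper - lower for lower, upper in vid_ranges)
print(total_vlan_ids)  # 102: 100 VIDs from the first range, 2 from the second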
@@ -3,7 +3,7 @@ from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from ipam.models import *
|
||||
from netbox.tables import NetBoxTable, columns
|
||||
from tenancy.tables import TenancyColumnsMixin
|
||||
from tenancy.tables import ContactsColumnMixin, TenancyColumnsMixin
|
||||
|
||||
__all__ = (
|
||||
'ASNTable',
|
||||
@@ -36,7 +36,7 @@ class ASNRangeTable(TenancyColumnsMixin, NetBoxTable):
|
||||
default_columns = ('pk', 'name', 'rir', 'start', 'end', 'tenant', 'asn_count', 'description')
|
||||
|
||||
|
||||
class ASNTable(TenancyColumnsMixin, NetBoxTable):
|
||||
class ASNTable(TenancyColumnsMixin, ContactsColumnMixin, NetBoxTable):
|
||||
asn = tables.Column(
|
||||
verbose_name=_('ASN'),
|
||||
linkify=True
|
||||
@@ -76,7 +76,7 @@ class ASNTable(TenancyColumnsMixin, NetBoxTable):
|
||||
model = ASN
|
||||
fields = (
|
||||
'pk', 'asn', 'asn_asdot', 'rir', 'site_count', 'provider_count', 'tenant', 'tenant_group', 'description',
|
||||
'comments', 'sites', 'tags', 'created', 'last_updated', 'actions',
|
||||
'contacts', 'comments', 'sites', 'tags', 'created', 'last_updated', 'actions',
|
||||
)
|
||||
default_columns = (
|
||||
'pk', 'asn', 'rir', 'site_count', 'provider_count', 'sites', 'description', 'tenant',
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
import django_tables2 as tables
|
||||
from django.utils.safestring import mark_safe
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from django_tables2.utils import Accessor
|
||||
|
||||
from ipam.models import *
|
||||
from netbox.tables import NetBoxTable, columns
|
||||
from tenancy.tables import TenancyColumnsMixin, TenantColumn
|
||||
from tenancy.tables import ContactsColumnMixin, TenancyColumnsMixin, TenantColumn
|
||||
from .template_code import *
|
||||
|
||||
__all__ = (
|
||||
@@ -58,7 +58,7 @@ class RIRTable(NetBoxTable):
|
||||
# Aggregates
|
||||
#
|
||||
|
||||
class AggregateTable(TenancyColumnsMixin, NetBoxTable):
|
||||
class AggregateTable(TenancyColumnsMixin, ContactsColumnMixin, NetBoxTable):
|
||||
prefix = tables.Column(
|
||||
linkify=True,
|
||||
verbose_name=_('Aggregate'),
|
||||
@@ -93,7 +93,7 @@ class AggregateTable(TenancyColumnsMixin, NetBoxTable):
|
||||
model = Aggregate
|
||||
fields = (
|
||||
'pk', 'id', 'prefix', 'rir', 'tenant', 'tenant_group', 'child_count', 'utilization', 'date_added',
|
||||
'description', 'comments', 'tags', 'created', 'last_updated',
|
||||
'description', 'contacts', 'comments', 'tags', 'created', 'last_updated',
|
||||
)
|
||||
default_columns = ('pk', 'prefix', 'rir', 'tenant', 'child_count', 'utilization', 'date_added', 'description')
|
||||
|
||||
@@ -154,7 +154,7 @@ class PrefixUtilizationColumn(columns.UtilizationColumn):
|
||||
"""
|
||||
|
||||
|
||||
class PrefixTable(TenancyColumnsMixin, NetBoxTable):
|
||||
class PrefixTable(TenancyColumnsMixin, ContactsColumnMixin, NetBoxTable):
|
||||
prefix = columns.TemplateColumn(
|
||||
verbose_name=_('Prefix'),
|
||||
template_code=PREFIX_LINK_WITH_DEPTH,
|
||||
@@ -237,8 +237,8 @@ class PrefixTable(TenancyColumnsMixin, NetBoxTable):
|
||||
model = Prefix
|
||||
fields = (
|
||||
'pk', 'id', 'prefix', 'prefix_flat', 'status', 'children', 'vrf', 'utilization', 'tenant', 'tenant_group',
|
||||
'scope', 'scope_type', 'vlan_group', 'vlan', 'role', 'is_pool', 'mark_utilized', 'description', 'comments',
|
||||
'tags', 'created', 'last_updated',
|
||||
'scope', 'scope_type', 'vlan_group', 'vlan', 'role', 'is_pool', 'mark_utilized', 'description', 'contacts',
|
||||
'comments', 'tags', 'created', 'last_updated',
|
||||
)
|
||||
default_columns = (
|
||||
'pk', 'prefix', 'status', 'children', 'vrf', 'utilization', 'tenant', 'scope', 'vlan', 'role',
|
||||
@@ -252,7 +252,7 @@ class PrefixTable(TenancyColumnsMixin, NetBoxTable):
|
||||
#
|
||||
# IP ranges
|
||||
#
|
||||
class IPRangeTable(TenancyColumnsMixin, NetBoxTable):
|
||||
class IPRangeTable(TenancyColumnsMixin, ContactsColumnMixin, NetBoxTable):
|
||||
start_address = tables.Column(
|
||||
verbose_name=_('Start address'),
|
||||
linkify=True
|
||||
@@ -293,8 +293,8 @@ class IPRangeTable(TenancyColumnsMixin, NetBoxTable):
|
||||
model = IPRange
|
||||
fields = (
|
||||
'pk', 'id', 'start_address', 'end_address', 'size', 'vrf', 'status', 'role', 'tenant', 'tenant_group',
|
||||
'mark_populated', 'mark_utilized', 'utilization', 'description', 'comments', 'tags', 'created',
|
||||
'last_updated',
|
||||
'mark_populated', 'mark_utilized', 'utilization', 'description', 'contacts', 'comments', 'tags',
|
||||
'created', 'last_updated',
|
||||
)
|
||||
default_columns = (
|
||||
'pk', 'start_address', 'end_address', 'size', 'vrf', 'status', 'role', 'tenant', 'description',
|
||||
@@ -308,7 +308,7 @@ class IPRangeTable(TenancyColumnsMixin, NetBoxTable):
|
||||
# IPAddresses
|
||||
#
|
||||
|
||||
class IPAddressTable(TenancyColumnsMixin, NetBoxTable):
|
||||
class IPAddressTable(TenancyColumnsMixin, ContactsColumnMixin, NetBoxTable):
|
||||
address = tables.TemplateColumn(
|
||||
template_code=IPADDRESS_LINK,
|
||||
verbose_name=_('IP Address')
|
||||
@@ -365,7 +365,7 @@ class IPAddressTable(TenancyColumnsMixin, NetBoxTable):
|
||||
model = IPAddress
|
||||
fields = (
|
||||
'pk', 'id', 'address', 'vrf', 'status', 'role', 'tenant', 'tenant_group', 'nat_inside', 'nat_outside',
|
||||
'assigned', 'dns_name', 'description', 'comments', 'tags', 'created', 'last_updated',
|
||||
'assigned', 'dns_name', 'description', 'comments', 'contacts', 'tags', 'created', 'last_updated',
|
||||
)
|
||||
default_columns = (
|
||||
'pk', 'address', 'vrf', 'status', 'role', 'tenant', 'assigned', 'dns_name', 'description',
|
||||
|
||||
@@ -3,6 +3,7 @@ import django_tables2 as tables
|
||||
|
||||
from ipam.models import *
|
||||
from netbox.tables import NetBoxTable, columns
|
||||
from tenancy.tables import ContactsColumnMixin
|
||||
|
||||
__all__ = (
|
||||
'ServiceTable',
|
||||
@@ -35,7 +36,7 @@ class ServiceTemplateTable(NetBoxTable):
|
||||
default_columns = ('pk', 'name', 'protocol', 'ports', 'description')
|
||||
|
||||
|
||||
class ServiceTable(NetBoxTable):
|
||||
class ServiceTable(ContactsColumnMixin, NetBoxTable):
|
||||
name = tables.Column(
|
||||
verbose_name=_('Name'),
|
||||
linkify=True
|
||||
@@ -60,7 +61,7 @@ class ServiceTable(NetBoxTable):
|
||||
class Meta(NetBoxTable.Meta):
|
||||
model = Service
|
||||
fields = (
|
||||
'pk', 'id', 'name', 'parent', 'protocol', 'ports', 'ipaddresses', 'description', 'comments', 'tags',
|
||||
'created', 'last_updated',
|
||||
'pk', 'id', 'name', 'parent', 'protocol', 'ports', 'ipaddresses', 'description', 'contacts', 'comments',
|
||||
'tags', 'created', 'last_updated',
|
||||
)
|
||||
default_columns = ('pk', 'name', 'parent', 'protocol', 'ports', 'description')
|
||||
|
||||
@@ -323,6 +323,55 @@ class AggregateTest(APIViewTestCases.APIViewTestCase):
|
||||
},
|
||||
]
|
||||
|
||||
@tag('regression')
|
||||
def test_graphql_aggregate_prefix_exact(self):
|
||||
"""
|
||||
Test case to verify aggregate prefix equality via field lookup in GraphQL API.
|
||||
"""
|
||||
|
||||
self.add_permissions('ipam.view_aggregate', 'ipam.view_rir')
|
||||
|
||||
rir = RIR.objects.create(name='RFC6598', slug='rfc6598', is_private=True)
|
||||
aggregate1 = Aggregate.objects.create(prefix='100.64.0.0/10', rir=rir)
|
||||
Aggregate.objects.create(prefix='203.0.113.0/24', rir=rir)
|
||||
|
||||
url = reverse('graphql')
|
||||
query = """{
|
||||
aggregate_list(filters: { prefix: { exact: "100.64.0.0/10" } }) { prefix }
|
||||
}"""
|
||||
response = self.client.post(url, data={'query': query}, format='json', **self.header)
|
||||
self.assertHttpStatus(response, status.HTTP_200_OK)
|
||||
data = response.json()
|
||||
self.assertNotIn('errors', data)
|
||||
|
||||
prefixes = {row['prefix'] for row in data['data']['aggregate_list']}
|
||||
self.assertIn(str(aggregate1.prefix), prefixes)
|
||||
|
||||
@tag('regression')
|
||||
def test_graphql_aggregate_contains_skips_invalid(self):
|
||||
"""
|
||||
Test the GraphQL API Aggregate `contains` filter skips invalid input.
|
||||
"""
|
||||
|
||||
self.add_permissions('ipam.view_aggregate', 'ipam.view_rir')
|
||||
|
||||
rir = RIR.objects.create(name='RIR 3', slug='rir-3', is_private=False)
|
||||
aggregate1 = Aggregate.objects.create(prefix='100.64.0.0/10', rir=rir)
|
||||
Aggregate.objects.create(prefix='203.0.113.0/24', rir=rir)
|
||||
|
||||
url = reverse('graphql')
|
||||
query = """{
|
||||
aggregate_list(filters: { contains: ["100.64.16.0/24", "not-a-cidr", ""] }) { prefix }
|
||||
}"""
|
||||
response = self.client.post(url, data={'query': query}, format='json', **self.header)
|
||||
self.assertHttpStatus(response, status.HTTP_200_OK)
|
||||
data = response.json()
|
||||
self.assertNotIn('errors', data)
|
||||
|
||||
prefixes = {row['prefix'] for row in data['data']['aggregate_list']}
|
||||
self.assertIn(str(aggregate1.prefix), prefixes)
|
||||
# No exception occurred; invalid entries were ignored
|
||||
|
||||
|
||||
class RoleTest(APIViewTestCases.APIViewTestCase):
|
||||
model = Role
|
||||
@@ -546,6 +595,30 @@ class PrefixTest(APIViewTestCases.APIViewTestCase):
|
||||
self.assertHttpStatus(response, status.HTTP_201_CREATED)
|
||||
self.assertEqual(len(response.data), 8)
|
||||
|
||||
@tag('regression')
|
||||
def test_graphql_tenant_prefixes_contains_nested_skips_invalid(self):
|
||||
"""
|
||||
Test the GraphQL API Tenant nested Prefix `contains` filter skips invalid input.
|
||||
"""
|
||||
|
||||
self.add_permissions('ipam.view_prefix', 'ipam.view_vrf', 'tenancy.view_tenant')
|
||||
|
||||
tenant = Tenant.objects.create(name='Tenant 1', slug='tenant-1')
|
||||
vrf = VRF.objects.create(name='Test VRF 1', rd='64512:1')
|
||||
Prefix.objects.create(prefix='10.20.0.0/16', vrf=vrf, tenant=tenant)
|
||||
Prefix.objects.create(prefix='198.51.100.0/24', vrf=vrf) # non-tenant
|
||||
|
||||
url = reverse('graphql')
|
||||
query = """{
|
||||
tenant_list(filters: { prefixes: { contains: ["10.20.1.0/24", "not-a-cidr"] } }) { id }
|
||||
}"""
|
||||
response = self.client.post(url, data={'query': query}, format='json', **self.header)
|
||||
self.assertHttpStatus(response, status.HTTP_200_OK)
|
||||
data = response.json()
|
||||
self.assertNotIn('errors', data)
|
||||
|
||||
self.assertTrue(data['data']['tenant_list']) # tenant returned
|
||||
|
||||
|
||||
class IPRangeTest(APIViewTestCases.APIViewTestCase):
|
||||
model = IPRange
|
||||
@@ -645,6 +718,65 @@ class IPRangeTest(APIViewTestCases.APIViewTestCase):
|
||||
self.assertHttpStatus(response, status.HTTP_201_CREATED)
|
||||
self.assertEqual(len(response.data), 8)
|
||||
|
||||
@tag('regression')
|
||||
def test_graphql_tenant_ip_ranges_parent_nested_skips_invalid(self):
|
||||
"""
|
||||
Test the GraphQL API Tenant nested IP Range `parent` filter skips invalid input.
|
||||
"""
|
||||
|
||||
self.add_permissions('tenancy.view_tenant', 'ipam.view_iprange', 'ipam.view_vrf')
|
||||
|
||||
tenant = Tenant.objects.create(name='Tenant 1', slug='tenant-1')
|
||||
vrf = VRF.objects.create(name='Test VRF 1', rd='64512:1')
|
||||
IPRange.objects.create(
|
||||
start_address=IPNetwork('10.30.0.1/24'), end_address=IPNetwork('10.30.0.255/24'), vrf=vrf, tenant=tenant
|
||||
)
|
||||
IPRange.objects.create(
|
||||
start_address=IPNetwork('10.31.0.1/24'), end_address=IPNetwork('10.31.0.255/24'), vrf=vrf, tenant=tenant
|
||||
)
|
||||
|
||||
url = reverse('graphql')
|
||||
query = """{
|
||||
tenant_list(filters: {
|
||||
name: { exact: "Tenant 1" }
|
||||
ip_ranges: { parent: ["10.30.0.0/24", "bogus"] }
|
||||
}) { id }
|
||||
}"""
|
||||
response = self.client.post(url, data={'query': query}, format='json', **self.header)
|
||||
self.assertHttpStatus(response, status.HTTP_200_OK)
|
||||
data = response.json()
|
||||
self.assertNotIn('errors', data)
|
||||
self.assertTrue(data['data']['tenant_list']) # tenant returned
|
||||
# No exception occurred; invalid entries were ignored
|
||||
|
||||
@tag('regression')
|
||||
def test_graphql_tenant_ip_ranges_contains_nested_skips_invalid(self):
|
||||
"""
|
||||
Test the GraphQL API Tenant nested IP Range `contains` filter skips invalid input.
|
||||
"""
|
||||
|
||||
self.add_permissions('tenancy.view_tenant', 'ipam.view_iprange', 'ipam.view_vrf')
|
||||
|
||||
tenant = Tenant.objects.create(name='Tenant 2', slug='tenant-2')
|
||||
vrf = VRF.objects.create(name='Test VRF 1', rd='64512:2')
|
||||
IPRange.objects.create(
|
||||
start_address=IPNetwork('10.40.0.1/24'), end_address=IPNetwork('10.40.0.255/24'), vrf=vrf, tenant=tenant
|
||||
)
|
||||
|
||||
url = reverse('graphql')
|
||||
query = """{
|
||||
tenant_list(filters: {
|
||||
name: { exact: "Tenant 2" }
|
||||
ip_ranges: { contains: ["10.40.0.128/25", "###"] }
|
||||
}) { id }
|
||||
}"""
|
||||
response = self.client.post(url, data={'query': query}, format='json', **self.header)
|
||||
self.assertHttpStatus(response, status.HTTP_200_OK)
|
||||
data = response.json()
|
||||
self.assertNotIn('errors', data)
|
||||
self.assertTrue(data['data']['tenant_list']) # tenant returned
|
||||
# No exception occurred; invalid entries were ignored
|
||||
|
||||
|
||||
class IPAddressTest(APIViewTestCases.APIViewTestCase):
|
||||
model = IPAddress
|
||||
@@ -731,6 +863,75 @@ class IPAddressTest(APIViewTestCases.APIViewTestCase):
|
||||
response = self.client.patch(url, data, format='json', **self.header)
|
||||
self.assertHttpStatus(response, status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
@tag('regression')
|
||||
def test_graphql_device_primary_ip4_assigned_nested(self):
|
||||
"""
|
||||
Test the GraphQL API Device nested IP Address `primary_ip4` filter.
|
||||
"""
|
||||
|
||||
self.add_permissions('dcim.view_device', 'dcim.view_interface', 'ipam.view_ipaddress')
|
||||
|
||||
site = Site.objects.create(name='Site 1')
|
||||
manufacturer = Manufacturer.objects.create(name='Manufacturer 1')
|
||||
device_type = DeviceType.objects.create(model='Device Type 1', manufacturer=manufacturer)
|
||||
role = DeviceRole.objects.create(name='Switch')
|
||||
|
||||
device1 = Device.objects.create(name='Device 1', site=site, device_type=device_type, role=role, status='active')
|
||||
interface1 = Interface.objects.create(name='Interface 1', device=device1, type='1000base-t')
|
||||
ip1 = IPAddress.objects.create(address='10.0.0.1/24')
|
||||
ip1.assigned_object = interface1
|
||||
ip1.save()
|
||||
device1.primary_ip4 = ip1
|
||||
device1.save()
|
||||
|
||||
device2 = Device.objects.create(name='Device 2', site=site, device_type=device_type, role=role, status='active')
|
||||
|
||||
url = reverse('graphql')
|
||||
query = """{
|
||||
device_list(filters: { primary_ip4: { assigned: true } }) { id name }
|
||||
}"""
|
||||
response = self.client.post(url, data={'query': query}, format='json', **self.header)
|
||||
self.assertHttpStatus(response, status.HTTP_200_OK)
|
||||
data = response.json()
|
||||
self.assertNotIn('errors', data)
|
||||
|
||||
ids = {row['id'] for row in data['data']['device_list']}
|
||||
self.assertIn(str(device1.pk), ids)
|
||||
self.assertNotIn(str(device2.pk), ids)
|
||||
|
||||
@tag('regression')
|
||||
def test_graphql_device_primary_ip4_parent_nested_skips_invalid(self):
|
||||
"""
|
||||
Test that the GraphQL API Device nested IP Address `parent` filter skips invalid input.
|
||||
"""
|
||||
|
||||
self.add_permissions('dcim.view_device', 'dcim.view_interface', 'ipam.view_ipaddress')
|
||||
|
||||
site = Site.objects.create(name='Site 1')
|
||||
manufacturer = Manufacturer.objects.create(name='Manufacturer 1')
|
||||
device_type = DeviceType.objects.create(model='Device Type 1', manufacturer=manufacturer)
|
||||
role = DeviceRole.objects.create(name='Switch')
|
||||
|
||||
device1 = Device.objects.create(name='Device 1', site=site, device_type=device_type, role=role, status='active')
|
||||
interface1 = Interface.objects.create(name='Interface 1', device=device1, type='1000base-t')
|
||||
ip1 = IPAddress.objects.create(address='192.0.2.10/24')
|
||||
ip1.assigned_object = interface1
|
||||
ip1.save()
|
||||
device1.primary_ip4 = ip1
|
||||
device1.save()
|
||||
|
||||
url = reverse('graphql')
|
||||
query = """{
|
||||
device_list(filters: { primary_ip4: { parent: ["192.0.2.0/24", "bad-cidr"] } }) { id }
|
||||
}"""
|
||||
response = self.client.post(url, data={'query': query}, format='json', **self.header)
|
||||
self.assertHttpStatus(response, status.HTTP_200_OK)
|
||||
data = response.json()
|
||||
self.assertNotIn('errors', data)
|
||||
|
||||
ids = {row['id'] for row in data['data']['device_list']}
|
||||
self.assertIn(str(device1.pk), ids)
|
||||
|
||||
|
||||
class FHRPGroupTest(APIViewTestCases.APIViewTestCase):
|
||||
model = FHRPGroup
|
||||
|
||||
@@ -661,6 +661,10 @@ class TestVLANGroup(TestCase):
|
||||
vlangroup.full_clean()
|
||||
vlangroup.save()
|
||||
|
||||
def test_total_vlan_ids(self):
|
||||
vlangroup = VLANGroup.objects.first()
|
||||
self.assertEqual(vlangroup._total_vlan_ids, 100)
|
||||
|
||||
|
||||
class TestVLAN(TestCase):
|
||||
|
||||
|
||||
@@ -108,6 +108,7 @@ class VRFBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(VRF, 'bulk_rename', path='rename', detail=False)
|
||||
class VRFBulkRenameView(generic.BulkRenameView):
|
||||
queryset = VRF.objects.all()
|
||||
filterset = filtersets.VRFFilterSet
|
||||
|
||||
|
||||
@register_model_view(VRF, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -163,6 +164,7 @@ class RouteTargetBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(RouteTarget, 'bulk_rename', path='rename', detail=False)
|
||||
class RouteTargetBulkRenameView(generic.BulkRenameView):
|
||||
queryset = RouteTarget.objects.all()
|
||||
filterset = filtersets.RouteTargetFilterSet
|
||||
|
||||
|
||||
@register_model_view(RouteTarget, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -227,6 +229,7 @@ class RIRBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(RIR, 'bulk_rename', path='rename', detail=False)
|
||||
class RIRBulkRenameView(generic.BulkRenameView):
|
||||
queryset = RIR.objects.all()
|
||||
filterset = filtersets.RIRFilterSet
|
||||
|
||||
|
||||
@register_model_view(RIR, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -305,6 +308,7 @@ class ASNRangeBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(ASNRange, 'bulk_rename', path='rename', detail=False)
|
||||
class ASNRangeBulkRenameView(generic.BulkRenameView):
|
||||
queryset = ASNRange.objects.all()
|
||||
filterset = filtersets.ASNRangeFilterSet
|
||||
|
||||
|
||||
@register_model_view(ASNRange, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -377,6 +381,7 @@ class ASNBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(ASN, 'bulk_rename', path='rename', detail=False)
|
||||
class ASNBulkRenameView(generic.BulkRenameView):
|
||||
queryset = ASN.objects.all()
|
||||
filterset = filtersets.ASNFilterSet
|
||||
|
||||
|
||||
@register_model_view(ASN, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -536,6 +541,7 @@ class RoleBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(Role, 'bulk_rename', path='rename', detail=False)
|
||||
class RoleBulkRenameView(generic.BulkRenameView):
|
||||
queryset = Role.objects.all()
|
||||
filterset = filtersets.RoleFilterSet
|
||||
|
||||
|
||||
@register_model_view(Role, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -820,6 +826,7 @@ class IPRangeBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(IPRange, 'bulk_rename', path='rename', detail=False)
|
||||
class IPRangeBulkRenameView(generic.BulkRenameView):
|
||||
queryset = IPRange.objects.all()
|
||||
filterset = filtersets.IPRangeFilterSet
|
||||
|
||||
|
||||
@register_model_view(IPRange, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -1066,6 +1073,7 @@ class VLANGroupBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(VLANGroup, 'bulk_rename', path='rename', detail=False)
|
||||
class VLANGroupBulkRenameView(generic.BulkRenameView):
|
||||
queryset = VLANGroup.objects.all()
|
||||
filterset = filtersets.VLANGroupFilterSet
|
||||
|
||||
|
||||
@register_model_view(VLANGroup, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -1160,6 +1168,7 @@ class VLANTranslationPolicyBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(VLANTranslationPolicy, 'bulk_rename', path='rename', detail=False)
|
||||
class VLANTranslationPolicyBulkRenameView(generic.BulkRenameView):
|
||||
queryset = VLANTranslationPolicy.objects.all()
|
||||
filterset = filtersets.VLANTranslationPolicyFilterSet
|
||||
|
||||
|
||||
@register_model_view(VLANTranslationPolicy, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -1315,6 +1324,7 @@ class FHRPGroupBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(FHRPGroup, 'bulk_rename', path='rename', detail=False)
|
||||
class FHRPGroupBulkRenameView(generic.BulkRenameView):
|
||||
queryset = FHRPGroup.objects.all()
|
||||
filterset = filtersets.FHRPGroupFilterSet
|
||||
|
||||
|
||||
@register_model_view(FHRPGroup, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -1447,6 +1457,7 @@ class VLANBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(VLAN, 'bulk_rename', path='rename', detail=False)
|
||||
class VLANBulkRenameView(generic.BulkRenameView):
|
||||
queryset = VLAN.objects.all()
|
||||
filterset = filtersets.VLANFilterSet
|
||||
|
||||
|
||||
@register_model_view(VLAN, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -1502,6 +1513,7 @@ class ServiceTemplateBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(ServiceTemplate, 'bulk_rename', path='rename', detail=False)
|
||||
class ServiceTemplateBulkRenameView(generic.BulkRenameView):
|
||||
queryset = ServiceTemplate.objects.all()
|
||||
filterset = filtersets.ServiceTemplateFilterSet
|
||||
|
||||
|
||||
@register_model_view(ServiceTemplate, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -1574,6 +1586,7 @@ class ServiceBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(Service, 'bulk_rename', path='rename', detail=False)
|
||||
class ServiceBulkRenameView(generic.BulkRenameView):
|
||||
queryset = Service.objects.all()
|
||||
filterset = filtersets.ServiceFilterSet
|
||||
|
||||
|
||||
@register_model_view(Service, 'bulk_delete', path='delete', detail=False)
|
||||
|
||||
@@ -82,7 +82,7 @@ class Config:
|
||||
revision = ConfigRevision.objects.get(active=True)
|
||||
logger.debug(f"Loaded active configuration revision #{revision.pk}")
|
||||
except (ConfigRevision.DoesNotExist, ConfigRevision.MultipleObjectsReturned):
|
||||
logger.warning("No active configuration revision found - falling back to most recent")
|
||||
logger.debug("No active configuration revision found - falling back to most recent")
|
||||
revision = ConfigRevision.objects.order_by('-created').first()
|
||||
if revision is None:
|
||||
logger.debug("No previous configuration found in database; proceeding with default values")
|
||||
|
||||
@@ -183,6 +183,15 @@ PARAMS = (
|
||||
description=_("Enable maintenance mode"),
|
||||
field=forms.BooleanField
|
||||
),
|
||||
ConfigParam(
|
||||
name='COPILOT_ENABLED',
|
||||
label=_('NetBox Copilot enabled'),
|
||||
default=True,
|
||||
description=_(
|
||||
"Enable the NetBox Copilot AI agent globally. If enabled, users can toggle the agent individually."
|
||||
),
|
||||
field=forms.BooleanField
|
||||
),
|
||||
ConfigParam(
|
||||
name='GRAPHQL_ENABLED',
|
||||
label=_('GraphQL enabled'),
|
||||
|
||||
@@ -243,6 +243,9 @@ SESSION_FILE_PATH = None
|
||||
# },
|
||||
# "scripts": {
|
||||
# "BACKEND": "extras.storage.ScriptFileSystemStorage",
|
||||
# "OPTIONS": {
|
||||
# "allow_overwrite": True,
|
||||
# },
|
||||
# },
|
||||
# }
|
||||
|
||||
|
||||
@@ -25,10 +25,15 @@ def preferences(request):
|
||||
Adds preferences for the current user (if authenticated) to the template context.
|
||||
Example: {{ preferences|get_key:"pagination.placement" }}
|
||||
"""
|
||||
config = get_config()
|
||||
user_preferences = request.user.config if request.user.is_authenticated else {}
|
||||
return {
|
||||
'preferences': user_preferences,
|
||||
'htmx_navigation': user_preferences.get('ui.htmx_navigation', False) == 'true'
|
||||
'copilot_enabled': (
|
||||
config.COPILOT_ENABLED and not django_settings.ISOLATED_DEPLOYMENT and
|
||||
user_preferences.get('ui.copilot_enabled', False) == 'true'
|
||||
),
|
||||
'htmx_navigation': user_preferences.get('ui.htmx_navigation', False) == 'true',
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -2,6 +2,7 @@ import json
|
||||
|
||||
from django import forms
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.db import models
|
||||
from django.db.models import Q
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
@@ -99,6 +100,35 @@ class NetBoxModelImportForm(CSVModelForm, NetBoxModelForm):
|
||||
def _get_form_field(self, customfield):
|
||||
return customfield.to_form_field(for_csv_import=True)
|
||||
|
||||
def clean(self):
|
||||
"""
|
||||
Cleans data in a form, ensuring proper handling of model fields with `null=True`.
|
||||
Overrides the `clean` method from the parent form to process and sanitize cleaned
|
||||
data for defined fields in the associated model.
|
||||
"""
|
||||
super().clean()
|
||||
cleaned = self.cleaned_data
|
||||
|
||||
model = getattr(self._meta, "model", None)
|
||||
if not model:
|
||||
return cleaned
|
||||
|
||||
for f in model._meta.get_fields():
|
||||
# Only forward, DB-backed fields (skip M2M & reverse relations)
|
||||
if not isinstance(f, models.Field) or not f.concrete or f.many_to_many:
|
||||
continue
|
||||
|
||||
if getattr(f, "null", False):
|
||||
name = f.name
|
||||
if name not in cleaned:
|
||||
continue
|
||||
val = cleaned[name]
|
||||
# Only coerce empty strings; leave other types alone
|
||||
if isinstance(val, str) and val.strip() == "":
|
||||
cleaned[name] = None
|
||||
|
||||
return cleaned
|
||||
|
||||
|
||||
class NetBoxModelBulkEditForm(ChangelogMessageMixin, CustomFieldsMixin, BulkEditForm):
|
||||
"""
|
||||
|
||||
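A minimal standalone sketch (helper name assumed, not taken from the diff) of the coercion rule the new clean() override applies during CSV import: only concrete, non-many-to-many model fields declared with null=True have empty-string values replaced by None; blank-only fields keep ''.

from django.db import models

def coerce_empty_strings(cleaned_data, model):
    # Mirrors the clean() override above: skip M2M and reverse relations,
    # then coerce empty strings to None only for fields with null=True.
    for field in model._meta.get_fields():
        if not isinstance(field, models.Field) or not field.concrete or field.many_to_many:
            continue
        if getattr(field, 'null', False) and field.name in cleaned_data:
            value = cleaned_data[field.name]
            if isinstance(value, str) and value.strip() == '':
                cleaned_data[field.name] = None
    return cleaned_data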
@@ -2,14 +2,14 @@ import logging
|
||||
|
||||
from django.contrib.contenttypes.fields import GenericRelation
|
||||
from django.db import router
|
||||
from django.db.models.deletion import Collector
|
||||
from django.db.models.deletion import CASCADE, Collector
|
||||
|
||||
logger = logging.getLogger("netbox.models.deletion")
|
||||
|
||||
|
||||
class CustomCollector(Collector):
|
||||
"""
|
||||
Custom collector that handles GenericRelations correctly.
|
||||
Override Django's stock Collector to handle GenericRelations and ensure proper ordering of cascading deletions.
|
||||
"""
|
||||
|
||||
def collect(
|
||||
@@ -23,11 +23,15 @@ class CustomCollector(Collector):
|
||||
keep_parents=False,
|
||||
fail_on_restricted=True,
|
||||
):
|
||||
"""
|
||||
Override collect to first collect standard dependencies,
|
||||
then add GenericRelations to the dependency graph.
|
||||
"""
|
||||
# Call parent collect first to get all standard dependencies
|
||||
# By default, Django will force the deletion of dependent objects before the parent only if the ForeignKey field
|
||||
# is not nullable. We want to ensure proper ordering regardless, so if the ForeignKey has `on_delete=CASCADE`
|
||||
# applied, we set `nullable` to False when calling `collect()`.
|
||||
if objs and source and source_attr:
|
||||
model = objs[0].__class__
|
||||
field = model._meta.get_field(source_attr)
|
||||
if field.remote_field.on_delete == CASCADE:
|
||||
nullable = False
|
||||
|
||||
super().collect(
|
||||
objs,
|
||||
source=source,
|
||||
@@ -39,10 +43,8 @@ class CustomCollector(Collector):
|
||||
fail_on_restricted=fail_on_restricted,
|
||||
)
|
||||
|
||||
# Track which GenericRelations we've already processed to prevent infinite recursion
|
||||
# Add GenericRelations to the dependency graph
|
||||
processed_relations = set()
|
||||
|
||||
# Now add GenericRelations to the dependency graph
|
||||
for _, instances in list(self.data.items()):
|
||||
for instance in instances:
|
||||
# Get all GenericRelations for this model
|
||||
|
||||
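A minimal standalone sketch (helper name assumed, not taken from the diff) of the CASCADE check the collector override introduces: when the ForeignKey linking dependent objects to their parent cascades, the field is treated as non-nullable so dependents are collected for deletion ahead of the parent.

from django.db.models.deletion import CASCADE

def forces_child_first_ordering(objs, source_attr):
    # Mirrors the check added in CustomCollector.collect() above.
    if not objs or not source_attr:
        return False
    field = objs[0].__class__._meta.get_field(source_attr)
    return field.remote_field.on_delete == CASCADE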
@@ -1,6 +1,6 @@
|
||||
from django.template import loader
|
||||
from django.urls.exceptions import NoReverseMatch
|
||||
from django.utils.translation import gettext as _
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from core.models import ObjectType
|
||||
from extras.models import ExportTemplate
|
||||
|
||||
@@ -49,6 +49,15 @@ PREFERENCES = {
|
||||
else ''
|
||||
)
|
||||
),
|
||||
'ui.copilot_enabled': UserPreference(
|
||||
label=_('NetBox Copilot'),
|
||||
choices=(
|
||||
('', _('Disabled')),
|
||||
('true', _('Enabled')),
|
||||
),
|
||||
description=_('Enable the NetBox Copilot AI agent'),
|
||||
default=False,
|
||||
),
|
||||
'pagination.per_page': UserPreference(
|
||||
label=_('Page length'),
|
||||
choices=get_page_lengths(),
|
||||
|
||||
@@ -291,6 +291,9 @@ DEFAULT_STORAGES = {
|
||||
},
|
||||
"scripts": {
|
||||
"BACKEND": "extras.storage.ScriptFileSystemStorage",
|
||||
"OPTIONS": {
|
||||
"allow_overwrite": True,
|
||||
},
|
||||
},
|
||||
}
|
||||
STORAGES = DEFAULT_STORAGES | STORAGES
|
||||
@@ -653,6 +656,13 @@ DEPLOYMENT_ID = hashlib.sha256(SECRET_KEY.encode('utf-8')).hexdigest()[:16]
|
||||
CENSUS_URL = 'https://census.netbox.oss.netboxlabs.com/api/v1/'
|
||||
|
||||
|
||||
#
|
||||
# NetBox Copilot
|
||||
#
|
||||
|
||||
NETBOX_COPILOT_URL = 'https://static.copilot.netboxlabs.ai/load.js'
|
||||
|
||||
|
||||
#
|
||||
# Django social auth
|
||||
#
|
||||
|
||||
303
netbox/netbox/tests/test_forms.py
Normal file
@@ -0,0 +1,303 @@
|
||||
from django.test import TestCase
|
||||
|
||||
from dcim.choices import InterfaceTypeChoices
|
||||
from dcim.forms import InterfaceImportForm
|
||||
from dcim.models import Device, DeviceRole, DeviceType, Interface, Manufacturer, Site
|
||||
|
||||
|
||||
class NetBoxModelImportFormCleanTest(TestCase):
|
||||
"""
|
||||
Test the clean() method of NetBoxModelImportForm to ensure it properly converts
|
||||
empty strings to None for nullable fields during CSV import.
|
||||
Uses InterfaceImportForm as the concrete implementation to test.
|
||||
"""
|
||||
|
||||
@classmethod
|
||||
def setUpTestData(cls):
|
||||
# Create minimal test fixtures for Interface
|
||||
cls.site = Site.objects.create(name='Test Site', slug='test-site')
|
||||
cls.manufacturer = Manufacturer.objects.create(name='Test Manufacturer', slug='test-manufacturer')
|
||||
cls.device_type = DeviceType.objects.create(
|
||||
manufacturer=cls.manufacturer, model='Test Device Type', slug='test-device-type'
|
||||
)
|
||||
cls.device_role = DeviceRole.objects.create(name='Test Role', slug='test-role', color='ff0000')
|
||||
cls.device = Device.objects.create(
|
||||
name='Test Device', device_type=cls.device_type, role=cls.device_role, site=cls.site
|
||||
)
|
||||
# Create parent interfaces for ForeignKey testing
|
||||
cls.parent_interface = Interface.objects.create(
|
||||
device=cls.device, name='Parent Interface', type=InterfaceTypeChoices.TYPE_1GE_GBIC
|
||||
)
|
||||
cls.lag_interface = Interface.objects.create(
|
||||
device=cls.device, name='LAG Interface', type=InterfaceTypeChoices.TYPE_LAG
|
||||
)
|
||||
|
||||
def test_empty_string_to_none_nullable_charfield(self):
|
||||
"""Empty strings should convert to None for nullable CharField"""
|
||||
form = InterfaceImportForm(
|
||||
data={
|
||||
'device': self.device,
|
||||
'name': 'Interface 1',
|
||||
'type': InterfaceTypeChoices.TYPE_1GE_GBIC,
|
||||
'duplex': '', # nullable CharField
|
||||
}
|
||||
)
|
||||
self.assertTrue(form.is_valid(), f'Form errors: {form.errors}')
|
||||
self.assertIsNone(form.cleaned_data['duplex'])
|
||||
|
||||
def test_empty_string_to_none_nullable_integerfield(self):
|
||||
"""Empty strings should convert to None for nullable PositiveIntegerField"""
|
||||
form = InterfaceImportForm(
|
||||
data={
|
||||
'device': self.device,
|
||||
'name': 'Interface 2',
|
||||
'type': InterfaceTypeChoices.TYPE_1GE_GBIC,
|
||||
'speed': '', # nullable PositiveIntegerField
|
||||
}
|
||||
)
|
||||
self.assertTrue(form.is_valid(), f'Form errors: {form.errors}')
|
||||
self.assertIsNone(form.cleaned_data['speed'])
|
||||
|
||||
def test_empty_string_to_none_nullable_smallintegerfield(self):
|
||||
"""Empty strings should convert to None for nullable SmallIntegerField"""
|
||||
form = InterfaceImportForm(
|
||||
data={
|
||||
'device': self.device,
|
||||
'name': 'Interface 3',
|
||||
'type': InterfaceTypeChoices.TYPE_1GE_GBIC,
|
||||
'tx_power': '', # nullable SmallIntegerField
|
||||
}
|
||||
)
|
||||
self.assertTrue(form.is_valid(), f'Form errors: {form.errors}')
|
||||
self.assertIsNone(form.cleaned_data['tx_power'])
|
||||
|
||||
def test_empty_string_to_none_nullable_decimalfield(self):
|
||||
"""Empty strings should convert to None for nullable DecimalField"""
|
||||
form = InterfaceImportForm(
|
||||
data={
|
||||
'device': self.device,
|
||||
'name': 'Interface 4',
|
||||
'type': InterfaceTypeChoices.TYPE_1GE_GBIC,
|
||||
'rf_channel_frequency': '', # nullable DecimalField
|
||||
'rf_channel_width': '', # nullable DecimalField
|
||||
}
|
||||
)
|
||||
self.assertTrue(form.is_valid(), f'Form errors: {form.errors}')
|
||||
self.assertIsNone(form.cleaned_data['rf_channel_frequency'])
|
||||
self.assertIsNone(form.cleaned_data['rf_channel_width'])
|
||||
|
||||
def test_empty_string_to_none_nullable_foreignkey(self):
|
||||
"""Empty strings should convert to None for nullable ForeignKey"""
|
||||
form = InterfaceImportForm(
|
||||
data={
|
||||
'device': self.device,
|
||||
'name': 'Interface 5',
|
||||
'type': InterfaceTypeChoices.TYPE_1GE_GBIC,
|
||||
'lag': '', # nullable ForeignKey
|
||||
'parent': '', # nullable ForeignKey
|
||||
'bridge': '', # nullable ForeignKey
|
||||
'vrf': '', # nullable ForeignKey
|
||||
}
|
||||
)
|
||||
self.assertTrue(form.is_valid(), f'Form errors: {form.errors}')
|
||||
self.assertIsNone(form.cleaned_data['lag'])
|
||||
self.assertIsNone(form.cleaned_data['parent'])
|
||||
self.assertIsNone(form.cleaned_data['bridge'])
|
||||
self.assertIsNone(form.cleaned_data['vrf'])
|
||||
|
||||
def test_empty_string_preserved_non_nullable_charfield(self):
|
||||
"""Empty strings should be preserved for non-nullable CharField (blank=True only)"""
|
||||
form = InterfaceImportForm(
|
||||
data={
|
||||
'device': self.device,
|
||||
'name': 'Interface 6',
|
||||
'type': InterfaceTypeChoices.TYPE_1GE_GBIC,
|
||||
'label': '', # CharField with blank=True (not null=True)
|
||||
'description': '', # CharField with blank=True (not null=True)
|
||||
}
|
||||
)
|
||||
self.assertTrue(form.is_valid(), f'Form errors: {form.errors}')
|
||||
# label and description are NOT nullable in the model, so empty string remains
|
||||
self.assertEqual(form.cleaned_data['label'], '')
|
||||
self.assertEqual(form.cleaned_data['description'], '')
|
||||
|
||||
def test_empty_string_not_converted_for_required_fields(self):
|
||||
"""Empty strings should NOT be converted for required fields"""
|
||||
form = InterfaceImportForm(
|
||||
data={
|
||||
'device': self.device,
|
||||
'name': '', # required field, empty string should remain and cause error
|
||||
'type': InterfaceTypeChoices.TYPE_1GE_GBIC,
|
||||
}
|
||||
)
|
||||
# Form should be invalid because name is required
|
||||
self.assertFalse(form.is_valid())
|
||||
if form.errors:
|
||||
self.assertIn('name', form.errors)
|
||||
|
||||
def test_non_string_none_value_preserved(self):
|
||||
"""None values should be preserved (not modified)"""
|
||||
form = InterfaceImportForm(
|
||||
data={
|
||||
'device': self.device,
|
||||
'name': 'Interface 7',
|
||||
'type': InterfaceTypeChoices.TYPE_1GE_GBIC,
|
||||
'speed': None, # Already None
|
||||
'tx_power': None, # Already None
|
||||
}
|
||||
)
|
||||
self.assertTrue(form.is_valid(), f'Form errors: {form.errors}')
|
||||
self.assertIsNone(form.cleaned_data['speed'])
|
||||
self.assertIsNone(form.cleaned_data['tx_power'])
|
||||
|
||||
def test_non_string_numeric_values_preserved(self):
|
||||
"""Numeric values (including 0) should not be modified"""
|
||||
form = InterfaceImportForm(
|
||||
data={
|
||||
'device': self.device,
|
||||
'name': 'Interface 8',
|
||||
'type': InterfaceTypeChoices.TYPE_1GE_GBIC,
|
||||
'speed': 0, # nullable PositiveIntegerField with value 0
|
||||
'tx_power': 0, # nullable SmallIntegerField with value 0
|
||||
}
|
||||
)
|
||||
self.assertTrue(form.is_valid(), f'Form errors: {form.errors}')
|
||||
self.assertEqual(form.cleaned_data['speed'], 0)
|
||||
self.assertEqual(form.cleaned_data['tx_power'], 0)
|
||||
|
||||
def test_manytomany_fields_skipped(self):
|
||||
"""ManyToMany fields should be skipped and not cause errors"""
|
||||
# Interface has 'vdcs' and 'wireless_lans' as M2M fields
|
||||
form = InterfaceImportForm(
|
||||
data={
|
||||
'device': self.device,
|
||||
'name': 'Interface 9',
|
||||
'type': InterfaceTypeChoices.TYPE_1GE_GBIC,
|
||||
# vdcs and wireless_lans fields are M2M, handled by parent class
|
||||
}
|
||||
)
|
||||
self.assertTrue(form.is_valid(), f'Form errors: {form.errors}')
|
||||
|
||||
def test_fields_not_in_cleaned_data_skipped(self):
|
||||
"""Fields not present in cleaned_data should be skipped gracefully"""
|
||||
# Create minimal form data - some nullable fields won't be in cleaned_data
|
||||
form = InterfaceImportForm(
|
||||
data={
|
||||
'device': self.device,
|
||||
'name': 'Interface 10',
|
||||
'type': InterfaceTypeChoices.TYPE_1GE_GBIC,
|
||||
# lag, parent, bridge, vrf, speed, etc. not provided
|
||||
}
|
||||
)
|
||||
# Should not raise KeyError when checking fields not in form data
|
||||
self.assertTrue(form.is_valid(), f'Form errors: {form.errors}')
|
||||
|
||||
def test_valid_string_values_preserved(self):
|
||||
"""Non-empty string values should be properly converted to their target types"""
|
||||
form = InterfaceImportForm(
|
||||
data={
|
||||
'device': self.device,
|
||||
'name': 'Interface 11',
|
||||
'type': InterfaceTypeChoices.TYPE_1GE_GBIC,
|
||||
'speed': '1000000', # Valid speed value (string will be converted to int)
|
||||
'mtu': '1500', # Valid mtu value (string will be converted to int)
|
||||
'description': 'Test description',
|
||||
}
|
||||
)
|
||||
self.assertTrue(form.is_valid(), f'Form errors: {form.errors}')
|
||||
# speed and mtu are converted to int
|
||||
self.assertEqual(form.cleaned_data['speed'], 1000000)
|
||||
self.assertEqual(form.cleaned_data['mtu'], 1500)
|
||||
self.assertEqual(form.cleaned_data['description'], 'Test description')
|
||||
|
||||
def test_multiple_nullable_fields_with_empty_strings(self):
|
||||
"""Multiple nullable fields with empty strings should all convert to None"""
|
||||
form = InterfaceImportForm(
|
||||
data={
|
||||
'device': self.device,
|
||||
'name': 'Interface 12',
|
||||
'type': InterfaceTypeChoices.TYPE_1GE_GBIC,
|
||||
'speed': '', # nullable
|
||||
'duplex': '', # nullable
|
||||
'tx_power': '', # nullable
|
||||
'vrf': '', # nullable ForeignKey
|
||||
'poe_mode': '', # nullable
|
||||
'poe_type': '', # nullable
|
||||
}
|
||||
)
|
||||
self.assertTrue(form.is_valid(), f'Form errors: {form.errors}')
|
||||
# All nullable fields should convert to None
|
||||
self.assertIsNone(form.cleaned_data['speed'])
|
||||
self.assertIsNone(form.cleaned_data['duplex'])
|
||||
self.assertIsNone(form.cleaned_data['tx_power'])
|
||||
self.assertIsNone(form.cleaned_data['vrf'])
|
||||
self.assertIsNone(form.cleaned_data['poe_mode'])
|
||||
self.assertIsNone(form.cleaned_data['poe_type'])
|
||||
|
||||
def test_mixed_nullable_and_non_nullable_empty_strings(self):
|
||||
"""Combination of nullable and non-nullable fields with empty strings"""
|
||||
form = InterfaceImportForm(
|
||||
data={
|
||||
'device': self.device,
|
||||
'name': 'Interface 13',
|
||||
'type': InterfaceTypeChoices.TYPE_1GE_GBIC,
|
||||
'speed': '', # nullable, should become None
|
||||
'label': '', # NOT nullable (blank=True only), should remain empty string
|
||||
'duplex': '', # nullable, should become None
|
||||
'description': '', # NOT nullable (blank=True only), should remain empty string
|
||||
}
|
||||
)
|
||||
self.assertTrue(form.is_valid(), f'Form errors: {form.errors}')
|
||||
# Nullable fields convert to None
|
||||
self.assertIsNone(form.cleaned_data['speed'])
|
||||
self.assertIsNone(form.cleaned_data['duplex'])
|
||||
# Non-nullable fields remain empty strings
|
||||
self.assertEqual(form.cleaned_data['label'], '')
|
||||
self.assertEqual(form.cleaned_data['description'], '')
|
||||
|
||||
def test_wireless_fields_nullable(self):
|
||||
"""Wireless-specific nullable fields should convert empty strings to None"""
|
||||
form = InterfaceImportForm(
|
||||
data={
|
||||
'device': self.device,
|
||||
'name': 'Interface 14',
|
||||
'type': InterfaceTypeChoices.TYPE_1GE_GBIC,
|
||||
'rf_role': '', # nullable CharField
|
||||
'rf_channel': '', # nullable CharField
|
||||
'rf_channel_frequency': '', # nullable DecimalField
|
||||
'rf_channel_width': '', # nullable DecimalField
|
||||
}
|
||||
)
|
||||
self.assertTrue(form.is_valid(), f'Form errors: {form.errors}')
|
||||
self.assertIsNone(form.cleaned_data['rf_role'])
|
||||
self.assertIsNone(form.cleaned_data['rf_channel'])
|
||||
self.assertIsNone(form.cleaned_data['rf_channel_frequency'])
|
||||
self.assertIsNone(form.cleaned_data['rf_channel_width'])
|
||||
|
||||
def test_poe_fields_nullable(self):
|
||||
"""PoE-specific nullable fields should convert empty strings to None"""
|
||||
form = InterfaceImportForm(
|
||||
data={
|
||||
'device': self.device,
|
||||
'name': 'Interface 15',
|
||||
'type': InterfaceTypeChoices.TYPE_1GE_GBIC,
|
||||
'poe_mode': '', # nullable CharField
|
||||
'poe_type': '', # nullable CharField
|
||||
}
|
||||
)
|
||||
self.assertTrue(form.is_valid(), f'Form errors: {form.errors}')
|
||||
self.assertIsNone(form.cleaned_data['poe_mode'])
|
||||
self.assertIsNone(form.cleaned_data['poe_type'])
|
||||
|
||||
def test_wwn_field_nullable(self):
|
||||
"""WWN field (special field type) should convert empty string to None"""
|
||||
form = InterfaceImportForm(
|
||||
data={
|
||||
'device': self.device,
|
||||
'name': 'Interface 16',
|
||||
'type': InterfaceTypeChoices.TYPE_1GE_GBIC,
|
||||
'wwn': '', # nullable WWNField
|
||||
}
|
||||
)
|
||||
self.assertTrue(form.is_valid(), f'Form errors: {form.errors}')
|
||||
self.assertIsNone(form.cleaned_data['wwn'])
|
||||
@@ -323,7 +323,7 @@ class BulkCreateView(GetReturnURLMixin, BaseMultiObjectView):
|
||||
|
||||
class BulkImportView(GetReturnURLMixin, BaseMultiObjectView):
|
||||
"""
|
||||
Import objects in bulk (CSV format).
|
||||
Import objects in bulk (CSV/JSON/YAML format).
|
||||
|
||||
Attributes:
|
||||
model_form: The form used to create each imported object
|
||||
@@ -368,7 +368,7 @@ class BulkImportView(GetReturnURLMixin, BaseMultiObjectView):
|
||||
error_messages.append(f"Record {index} {prefix}{field_name}: {err}")
|
||||
return error_messages
|
||||
|
||||
def _save_object(self, model_form, request):
|
||||
def _save_object(self, model_form, request, parent_idx):
|
||||
_action = 'Updated' if model_form.instance.pk else 'Created'
|
||||
|
||||
# Save the primary object
|
||||
@@ -381,8 +381,25 @@ class BulkImportView(GetReturnURLMixin, BaseMultiObjectView):
|
||||
# Iterate through the related object forms (if any), validating and saving each instance.
|
||||
for field_name, related_object_form in self.related_object_forms.items():
|
||||
|
||||
related_objects = model_form.data.get(field_name, list())
|
||||
if not isinstance(related_objects, list):
|
||||
raise ValidationError(
|
||||
self._compile_form_errors(
|
||||
{field_name: [_("Must be a list.")]},
|
||||
index=parent_idx
|
||||
)
|
||||
)
|
||||
|
||||
related_obj_pks = []
|
||||
for i, rel_obj_data in enumerate(model_form.data.get(field_name, list())):
|
||||
for i, rel_obj_data in enumerate(related_objects, start=1):
|
||||
if not isinstance(rel_obj_data, dict):
|
||||
raise ValidationError(
|
||||
self._compile_form_errors(
|
||||
{f'{field_name}[{i}]': [_("Must be a dictionary.")]},
|
||||
index=parent_idx,
|
||||
)
|
||||
)
|
||||
|
||||
rel_obj_data = self.prep_related_object_data(obj, rel_obj_data)
|
||||
f = related_object_form(rel_obj_data)
|
||||
|
||||
@@ -396,7 +413,7 @@ class BulkImportView(GetReturnURLMixin, BaseMultiObjectView):
|
||||
else:
|
||||
# Replicate errors on the related object form to the import form for display and abort
|
||||
raise ValidationError(
|
||||
self._compile_form_errors(f.errors, index=i, prefix=f'{field_name}[{i}]')
|
||||
self._compile_form_errors(f.errors, index=parent_idx, prefix=f'{field_name}[{i}]')
|
||||
)
|
||||
|
||||
# Enforce object-level permissions on related objects
|
||||
@@ -439,8 +456,12 @@ class BulkImportView(GetReturnURLMixin, BaseMultiObjectView):
|
||||
try:
|
||||
instance = prefetched_objects[object_id]
|
||||
except KeyError:
|
||||
form.add_error('data', _("Row {i}: Object with ID {id} does not exist").format(i=i, id=object_id))
|
||||
raise ValidationError('')
|
||||
raise ValidationError(
|
||||
self._compile_form_errors(
|
||||
{'id': [_("Object with ID {id} does not exist").format(id=object_id)]},
|
||||
index=i
|
||||
)
|
||||
)
|
||||
|
||||
# Take a snapshot for change logging
|
||||
if instance.pk and hasattr(instance, 'snapshot'):
|
||||
@@ -481,7 +502,7 @@ class BulkImportView(GetReturnURLMixin, BaseMultiObjectView):
|
||||
restrict_form_fields(model_form, request.user)
|
||||
|
||||
if model_form.is_valid():
|
||||
obj = self._save_object(model_form, request)
|
||||
obj = self._save_object(model_form, request, i)
|
||||
saved_objects.append(obj)
|
||||
else:
|
||||
# Raise model form errors
|
||||
@@ -799,6 +820,9 @@ class BulkRenameView(GetReturnURLMixin, BaseMultiObjectView):
|
||||
"""
|
||||
field_name = 'name'
|
||||
template_name = 'generic/bulk_rename.html'
|
||||
# Match BulkEditView/BulkDeleteView behavior: allow passing a FilterSet
|
||||
# so "Select all N matching query" can expand across the full queryset.
|
||||
filterset = None
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
@@ -827,12 +851,12 @@ class BulkRenameView(GetReturnURLMixin, BaseMultiObjectView):
|
||||
replace = form.cleaned_data['replace']
|
||||
if form.cleaned_data['use_regex']:
|
||||
try:
|
||||
obj.new_name = re.sub(find, replace, getattr(obj, self.field_name, ''))
|
||||
obj.new_name = re.sub(find, replace, getattr(obj, self.field_name, '') or '')
|
||||
# Catch regex group reference errors
|
||||
except re.error:
|
||||
obj.new_name = getattr(obj, self.field_name)
|
||||
else:
|
||||
obj.new_name = getattr(obj, self.field_name, '').replace(find, replace)
|
||||
obj.new_name = (getattr(obj, self.field_name, '') or '').replace(find, replace)
|
||||
renamed_pks.append(obj.pk)
|
||||
|
||||
return renamed_pks
|
||||
@@ -840,9 +864,16 @@ class BulkRenameView(GetReturnURLMixin, BaseMultiObjectView):
|
||||
def post(self, request):
|
||||
logger = logging.getLogger('netbox.views.BulkRenameView')
|
||||
|
||||
# If we are editing *all* objects in the queryset, replace the PK list with all matched objects.
|
||||
if request.POST.get('_all') and self.filterset is not None:
|
||||
pk_list = self.filterset(request.GET, self.queryset.values_list('pk', flat=True), request=request).qs
|
||||
else:
|
||||
pk_list = request.POST.getlist('pk')
|
||||
|
||||
selected_objects = self.queryset.filter(pk__in=pk_list)
|
||||
|
||||
if '_preview' in request.POST or '_apply' in request.POST:
|
||||
form = self.form(request.POST, initial={'pk': request.POST.getlist('pk')})
|
||||
selected_objects = self.queryset.filter(pk__in=form.initial['pk'])
|
||||
form = self.form(request.POST, initial={'pk': pk_list})
|
||||
|
||||
if form.is_valid():
|
||||
try:
|
||||
@@ -877,8 +908,7 @@ class BulkRenameView(GetReturnURLMixin, BaseMultiObjectView):
|
||||
clear_events.send(sender=self)
|
||||
|
||||
else:
|
||||
form = self.form(initial={'pk': request.POST.getlist('pk')})
|
||||
selected_objects = self.queryset.filter(pk__in=form.initial['pk'])
|
||||
form = self.form(initial={'pk': pk_list})
|
||||
|
||||
return render(request, self.template_name, {
|
||||
'field_name': self.field_name,
|
||||
|
||||
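A minimal standalone sketch (helper name assumed, not taken from the diff) of the null-safe rename logic BulkRenameView now uses: coalescing a missing or None value to '' keeps re.sub() and str.replace() from raising on objects whose name attribute is unset, and an invalid regex simply leaves the name unchanged.

import re

def preview_bulk_rename(objs, find, replace, use_regex=False, field_name='name'):
    # Mirrors the guarded rename above; returns (object, proposed new name) pairs.
    previews = []
    for obj in objs:
        current = getattr(obj, field_name, '') or ''
        if use_regex:
            try:
                new_name = re.sub(find, replace, current)
            except re.error:  # invalid pattern or group reference
                new_name = current
        else:
            new_name = current.replace(find, replace)
        previews.append((obj, new_name))
    return previews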
@@ -559,6 +559,7 @@ class ComponentCreateView(GetReturnURLMixin, BaseObjectView):
|
||||
form.instance._replicated_base = hasattr(self.form, "replication_fields")
|
||||
|
||||
if form.is_valid():
|
||||
changelog_message = form.cleaned_data.pop('changelog_message', '')
|
||||
new_components = []
|
||||
data = deepcopy(request.POST)
|
||||
pattern_count = len(form.cleaned_data[self.form.replication_fields[0]])
|
||||
@@ -585,6 +586,9 @@ class ComponentCreateView(GetReturnURLMixin, BaseObjectView):
|
||||
# Create the new components
|
||||
new_objs = []
|
||||
for component_form in new_components:
|
||||
# Record changelog message (if any)
|
||||
if changelog_message:
|
||||
component_form.instance._changelog_message = changelog_message
|
||||
obj = component_form.save()
|
||||
new_objs.append(obj)
|
||||
|
||||
|
||||
2
netbox/project-static/dist/netbox.css
vendored
File diff suppressed because one or more lines are too long
4
netbox/project-static/dist/netbox.js
vendored
File diff suppressed because one or more lines are too long
4
netbox/project-static/dist/netbox.js.map
vendored
File diff suppressed because one or more lines are too long
@@ -28,9 +28,9 @@
|
||||
"clipboard": "2.0.11",
|
||||
"flatpickr": "4.6.13",
|
||||
"gridstack": "12.3.3",
|
||||
"htmx.org": "2.0.7",
|
||||
"htmx.org": "2.0.8",
|
||||
"query-string": "9.3.1",
|
||||
"sass": "1.93.2",
|
||||
"sass": "1.94.2",
|
||||
"tom-select": "2.4.3",
|
||||
"typeface-inter": "3.18.1",
|
||||
"typeface-roboto-mono": "1.1.13"
|
||||
|
||||
@@ -20,11 +20,13 @@ function slugify(slug: string, chars: number): string {
|
||||
* For any slug fields, add event listeners to handle automatically generating slug values.
|
||||
*/
|
||||
export function initReslug(): void {
|
||||
for (const slugButton of getElements<HTMLButtonElement>('button#reslug')) {
|
||||
for (const slugButton of getElements<HTMLButtonElement>('button.reslug')) {
|
||||
const form = slugButton.form;
|
||||
if (form == null) continue;
|
||||
const slugField = form.querySelector('#id_slug') as HTMLInputElement;
|
||||
|
||||
const slugField = form.querySelector('input.slug-field') as HTMLInputElement;
|
||||
if (slugField == null) continue;
|
||||
|
||||
const sourceId = slugField.getAttribute('slug-source');
|
||||
const sourceField = form.querySelector(`#id_${sourceId}`) as HTMLInputElement;
|
||||
|
||||
|
||||
@@ -16,6 +16,11 @@ pre {
|
||||
background: var(--#{$prefix}bg-surface);
|
||||
}
|
||||
|
||||
// Permit copying of badge text
|
||||
.badge {
|
||||
user-select: text;
|
||||
}
|
||||
|
||||
// Button adjustments
|
||||
.btn {
|
||||
// Tabler sets display: flex
|
||||
@@ -157,3 +162,18 @@ pre code {
|
||||
vertical-align: .05em;
|
||||
height: auto;
|
||||
}
|
||||
|
||||
// Theme-based visibility utilities
|
||||
// Tabler's .hide-theme-* utilities expect data-bs-theme on :root, but NetBox applies
|
||||
// it to body. These overrides use higher specificity selectors to ensure theme-based
|
||||
// visibility works correctly. The :root:not(.dummy) pattern provides the additional
|
||||
// specificity needed to override Tabler's :root:not() rules.
|
||||
:root:not(.dummy) body[data-bs-theme='light'] .hide-theme-light,
|
||||
:root:not(.dummy) body[data-bs-theme='dark'] .hide-theme-dark {
|
||||
display: none !important;
|
||||
}
|
||||
|
||||
:root:not(.dummy) body[data-bs-theme='dark'] .hide-theme-light,
|
||||
:root:not(.dummy) body[data-bs-theme='light'] .hide-theme-dark {
|
||||
display: inline-flex !important;
|
||||
}
|
||||
|
||||
@@ -2241,10 +2241,10 @@ hey-listen@^1.0.8:
|
||||
resolved "https://registry.yarnpkg.com/hey-listen/-/hey-listen-1.0.8.tgz#8e59561ff724908de1aa924ed6ecc84a56a9aa68"
|
||||
integrity sha512-COpmrF2NOg4TBWUJ5UVyaCU2A88wEMkUPK4hNqyCkqHbxT92BbvfjoSozkAIIm6XhicGlJHhFdullInrdhwU8Q==
|
||||
|
||||
htmx.org@2.0.7:
|
||||
version "2.0.7"
|
||||
resolved "https://registry.yarnpkg.com/htmx.org/-/htmx.org-2.0.7.tgz#991571e009a2ea4cb60e7af8bb4c1c8c0de32ecd"
|
||||
integrity sha512-YiJqF3U5KyO28VC5mPfehKJPF+n1Gni+cupK+D69TF0nm7wY6AXn3a4mPWIikfAXtl1u1F1+ZhSCS7KT8pVmqA==
|
||||
htmx.org@2.0.8:
|
||||
version "2.0.8"
|
||||
resolved "https://registry.yarnpkg.com/htmx.org/-/htmx.org-2.0.8.tgz#8ac8ba87c141b7bfda7576117476062eeb4aceda"
|
||||
integrity sha512-fm297iru0iWsNJlBrjvtN7V9zjaxd+69Oqjh4F/Vq9Wwi2kFisLcrLCiv5oBX0KLfOX/zG8AUo9ROMU5XUB44Q==
|
||||
|
||||
ignore@^5.2.0:
|
||||
version "5.3.2"
|
||||
@@ -3190,10 +3190,10 @@ safe-regex-test@^1.1.0:
|
||||
es-errors "^1.3.0"
|
||||
is-regex "^1.2.1"
|
||||
|
||||
sass@1.93.2:
|
||||
version "1.93.2"
|
||||
resolved "https://registry.yarnpkg.com/sass/-/sass-1.93.2.tgz#e97d225d60f59a3b3dbb6d2ae3c1b955fd1f2cd1"
|
||||
integrity sha512-t+YPtOQHpGW1QWsh1CHQ5cPIr9lbbGZLZnbihP/D/qZj/yuV68m8qarcV17nvkOX81BCrvzAlq2klCQFZghyTg==
|
||||
sass@1.94.2:
|
||||
version "1.94.2"
|
||||
resolved "https://registry.yarnpkg.com/sass/-/sass-1.94.2.tgz#198511fc6fdd2fc0a71b8d1261735c12608d4ef3"
|
||||
integrity sha512-N+7WK20/wOr7CzA2snJcUSSNTCzeCGUTFY3OgeQP3mZ1aj9NMQ0mSTXwlrnd89j33zzQJGqIN52GIOmYrfq46A==
|
||||
dependencies:
|
||||
chokidar "^4.0.0"
|
||||
immutable "^5.0.2"
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
version: "4.4.4"
|
||||
version: "4.4.7"
|
||||
edition: "Community"
|
||||
published: "2025-10-15"
|
||||
published: "2025-11-25"
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.