Mirror of https://github.com/netbox-community/netbox.git (synced 2026-02-12 03:47:43 +01:00)

Compare commits: 12318-case ... v4.4.5 (40 commits)
Commits in this comparison:

43cb476223, d6f756d315, afc62b6ffd, 3d4841f17f, 2aefb3af73, 4eff4d6a4a, 9381564cab, 3d143d635b,
77307b3c91, aa4571b61f, 56d9146323, e192f64dd2, d433a28524, dbfdf318ad, 639bc4462b, 1c59d411f7,
ac7a4ec4a3, 0cf58e62b2, fb8d41b527, ae5d7911f9, 3bd0186870, 09ce8a808d, 8eaff9dce7, cb3308a166,
5fbae8407e, 2fdd46f64c, c5124cb2e4, d01d7b4156, 4db6123fb2, 43648d629b, 0b97df0984, 5334c8143c,
bbb330becf, e4c74ce6a3, a4868f894d, 531ea34207, 6747c82a1a, e251ea10b5, a1aaf465ac, 2a1d315d85
@@ -2,7 +2,7 @@
 name: ✨ Feature Request
 type: Feature
 description: Propose a new NetBox feature or enhancement
-labels: ["type: feature", "status: needs triage"]
+labels: ["netbox", "type: feature", "status: needs triage"]
 body:
   - type: markdown
     attributes:
@@ -15,7 +15,7 @@ body:
     attributes:
       label: NetBox version
       description: What version of NetBox are you currently running?
-      placeholder: v4.4.4
+      placeholder: v4.4.5
     validations:
       required: true
   - type: dropdown
.github/ISSUE_TEMPLATE/02-bug_report.yaml (vendored, 8 changed lines)

@@ -2,7 +2,7 @@
 name: 🐛 Bug Report
 type: Bug
 description: Report a reproducible bug in the current release of NetBox
-labels: ["type: bug", "status: needs triage"]
+labels: ["netbox", "type: bug", "status: needs triage"]
 body:
   - type: markdown
     attributes:
@@ -27,7 +27,7 @@ body:
     attributes:
      label: NetBox Version
       description: What version of NetBox are you currently running?
-      placeholder: v4.4.4
+      placeholder: v4.4.5
     validations:
       required: true
   - type: dropdown
@@ -35,9 +35,9 @@ body:
       label: Python Version
       description: What version of Python are you currently running?
       options:
+        - "3.10"
+        - "3.11"
         - "3.12"
-        - "3.13"
-        - "3.14"
     validations:
       required: true
   - type: textarea
@@ -2,7 +2,7 @@
 name: 📖 Documentation Change
 type: Documentation
 description: Suggest an addition or modification to the NetBox documentation
-labels: ["type: documentation", "status: needs triage"]
+labels: ["netbox", "type: documentation", "status: needs triage"]
 body:
   - type: dropdown
     attributes:
.github/ISSUE_TEMPLATE/04-translation.yaml (vendored, 2 changed lines)

@@ -2,7 +2,7 @@
 name: 🌍 Translation
 type: Translation
 description: Request support for a new language in the user interface
-labels: ["type: translation"]
+labels: ["netbox", "type: translation"]
 body:
   - type: markdown
     attributes:
.github/ISSUE_TEMPLATE/05-housekeeping.yaml (vendored, 2 changed lines)

@@ -2,7 +2,7 @@
 name: 🏡 Housekeeping
 type: Housekeeping
 description: A change pertaining to the codebase itself (developers only)
-labels: ["type: housekeeping"]
+labels: ["netbox", "type: housekeeping"]
 body:
   - type: markdown
     attributes:
.github/ISSUE_TEMPLATE/06-deprecation.yaml (vendored, 2 changed lines)

@@ -2,7 +2,7 @@
 name: 🗑️ Deprecation
 type: Deprecation
 description: The removal of an existing feature or resource
-labels: ["type: deprecation"]
+labels: ["netbox", "type: deprecation"]
 body:
   - type: textarea
     attributes:
.github/workflows/ci.yml (vendored, 2 changed lines)

@@ -31,7 +31,7 @@ jobs:
       NETBOX_CONFIGURATION: netbox.configuration_testing
     strategy:
       matrix:
-        python-version: ['3.12', '3.13']
+        python-version: ['3.10', '3.11', '3.12']
         node-version: ['20.x']
     services:
       redis:
@@ -1,6 +1,6 @@
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.6.9
+    rev: v0.14.1
     hooks:
       - id: ruff
         name: "Ruff linter"
contrib/openapi.json: 1256 changed lines (file diff suppressed because it is too large)
@@ -2,7 +2,7 @@
 
 ## Local Authentication
 
-Local user accounts and groups can be created in NetBox under the "Authentication" section in the "Admin" menu.
+Local user accounts and groups can be created in NetBox under the "Authentication" section in the "Admin" menu. This section is available only to users with the "staff" permission enabled.
 
 At a minimum, each user account must have a username and password set. User accounts may also denote a first name, last name, and email address. [Permissions](../permissions.md) may also be assigned to individual users and/or groups as needed.
@@ -1,15 +1,5 @@
 # GraphQL API Parameters
 
-## GRAPHQL_DEFAULT_VERSION
-
-!!! note "This parameter was introduced in NetBox v4.5."
-
-Default: `1`
-
-Designates the default version of the GraphQL API served by `/graphql/`. To access a specific version, append the version number to the URL, e.g. `/graphql/v2/`.
-
----
-
 ## GRAPHQL_ENABLED
 
 !!! tip "Dynamic Configuration Parameter"
@@ -53,6 +53,16 @@ Sets content for the top banner in the user interface.
 
 ---
 
+## COPILOT_ENABLED
+
+!!! tip "Dynamic Configuration Parameter"
+
+Default: `True`
+
+Enables or disables the [NetBox Copilot](https://netboxlabs.com/docs/copilot/) agent globally. When enabled, users can opt to toggle the agent individually.
+
+---
+
 ## CENSUS_REPORTING_ENABLED
 
 Default: `True`
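For illustration, a sketch of how this dynamic parameter could be pinned in `configuration.py` (the value shown is just an example; by default the agent remains enabled):

```python
# configuration.py -- example only: disable the NetBox Copilot agent globally
COPILOT_ENABLED = False
```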
@@ -127,3 +127,19 @@ The list of groups that promote an remote User to Superuser on Login. If group i
 Default: `[]` (Empty list)
 
 The list of users that get promoted to Superuser on Login. If user isn't present in list on next Login, the Role gets revoked. (Requires `REMOTE_AUTH_ENABLED` and `REMOTE_AUTH_GROUP_SYNC_ENABLED` )
+
+---
+
+## REMOTE_AUTH_STAFF_GROUPS
+
+Default: `[]` (Empty list)
+
+The list of groups that promote an remote User to Staff on Login. If group isn't present on next Login, the Role gets revoked. (Requires `REMOTE_AUTH_ENABLED` and `REMOTE_AUTH_GROUP_SYNC_ENABLED` )
+
+---
+
+## REMOTE_AUTH_STAFF_USERS
+
+Default: `[]` (Empty list)
+
+The list of users that get promoted to Staff on Login. If user isn't present in list on next Login, the Role gets revoked. (Requires `REMOTE_AUTH_ENABLED` and `REMOTE_AUTH_GROUP_SYNC_ENABLED` )
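A minimal sketch of how these parameters might appear in `configuration.py` (the group and user names below are placeholders):

```python
# configuration.py -- example values only
REMOTE_AUTH_ENABLED = True
REMOTE_AUTH_GROUP_SYNC_ENABLED = True
REMOTE_AUTH_STAFF_GROUPS = ['netbox-staff']   # members are promoted to staff on login
REMOTE_AUTH_STAFF_USERS = ['alice']           # listed users are promoted to staff on login
```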
@@ -23,31 +23,6 @@ ALLOWED_HOSTS = ['*']
 
 ---
 
-## API_TOKEN_PEPPERS
-
-!!! info "This parameter was introduced in NetBox v4.5."
-
-[Cryptographic peppers](https://en.wikipedia.org/wiki/Pepper_(cryptography)) are employed to generate hashes of sensitive values on the server. This parameter defines the peppers used to hash v2 API tokens in NetBox. You must define at least one pepper before creating a v2 API token. See the [API documentation](../integrations/rest-api.md#authentication) for further information about how peppers are used.
-
-```python
-API_TOKEN_PEPPERS = {
-    # DO NOT USE THIS EXAMPLE PEPPER IN PRODUCTION
-    1: 'kp7ht*76fiQAhUi5dHfASLlYUE_S^gI^(7J^K5M!LfoH@vl&b_',
-}
-```
-
-!!! warning "Peppers are sensitive"
-    Treat pepper values as extremely sensitive. Consider populating peppers from environment variables at initialization time rather than defining them in the configuration file, if feasible.
-
-Peppers must be at least 50 characters in length and should comprise a random string with a diverse character set. Consider using the Python script at `$INSTALL_ROOT/netbox/generate_secret_key.py` to generate a pepper value.
-
-It is recommended to start with a pepper ID of `1`. Additional peppers can be introduced later as needed to begin rotating token hashes.
-
-!!! tip
-    Although NetBox will run without `API_TOKEN_PEPPERS` defined, the use of v2 API tokens will be unavailable.
-
----
-
 ## DATABASE
 
 !!! warning "Legacy Configuration Parameter"
@@ -1,5 +1,16 @@
 # Security & Authentication Parameters
 
+## ALLOW_TOKEN_RETRIEVAL
+
+Default: `False`
+
+!!! note
+    The default value of this parameter changed from `True` to `False` in NetBox v4.3.0.
+
+If disabled, the values of API tokens will not be displayed after each token's initial creation. A user **must** record the value of a token prior to its creation, or it will be lost. Note that this affects _all_ users, regardless of assigned permissions.
+
+---
+
 ## ALLOWED_URL_SCHEMES
 
 !!! tip "Dynamic Configuration Parameter"
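A corresponding `configuration.py` sketch (re-enabling retrieval simply restores the pre-v4.3 default):

```python
# configuration.py -- example only: allow token values to be re-displayed after creation
ALLOW_TOKEN_RETRIEVAL = True
```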
@@ -131,6 +131,17 @@ self.log_info(f"Running as user {username} (IP: {ip_address})...")
 
 For a complete list of available request parameters, please see the [Django documentation](https://docs.djangoproject.com/en/stable/ref/request-response/).
 
+## Reading Data from Files
+
+The Script class provides two convenience methods for reading data from files:
+
+* `load_yaml`
+* `load_json`
+
+These two methods will load data in YAML or JSON format, respectively, from files within the local path (i.e. `SCRIPTS_ROOT`).
+
+**Note:** These convenience methods are deprecated and will be removed in NetBox v4.4. These only work if running scripts within the local path, they will not work if using a storage other than ScriptFileSystemStorage.
+
 ## Logging
 
 The Script object provides a set of convenient functions for recording messages at different severity levels:
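As a sketch of the `load_yaml` usage described above (the file name, keys, and script below are illustrative; the data file is assumed to live under `SCRIPTS_ROOT`):

```python
from extras.scripts import Script


class ImportSites(Script):
    """Illustrative only: read a YAML file shipped alongside the script."""

    def run(self, data, commit):
        # load_yaml() reads SCRIPTS_ROOT/sites.yml and returns the parsed data
        sites = self.load_yaml('sites.yml')
        for site in sites:
            self.log_info(f"Would create site {site['name']}")
```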
@@ -393,6 +404,61 @@ A complete date & time. Returns a `datetime.datetime` object.
 
 Custom scripts can be run via the web UI by navigating to the script, completing any required form data, and clicking the "run script" button. It is possible to schedule a script to be executed at specified time in the future. A scheduled script can be canceled by deleting the associated job result object.
 
+#### Prefilling variables via URL parameters
+
+Script form fields can be prefilled by appending query parameters to the script URL. Each parameter name must match the variable name defined on the script class. Prefilled values are treated as initial values and can be edited before execution. Multiple values can be supplied by repeating the same parameter. Query values must be percent‑encoded where required (for example, spaces as `%20`).
+
+Examples:
+
+For string and integer variables, when a script defines:
+
+```python
+from extras.scripts import Script, StringVar, IntegerVar
+
+class MyScript(Script):
+    name = StringVar()
+    count = IntegerVar()
+```
+
+the following URL prefills the `name` and `count` fields:
+
+```
+https://<netbox>/extras/scripts/<script_id>/?name=Branch42&count=3
+```
+
+For object variables (`ObjectVar`), supply the object’s primary key (PK):
+
+```
+https://<netbox>/extras/scripts/<script_id>/?device=1
+```
+
+If an object ID cannot be resolved or the object is not visible to the requesting user, the field remains unpopulated.
+
+Supported variable types:
+
+| Variable class           | Expected input                  | Example query string                        |
+|--------------------------|---------------------------------|---------------------------------------------|
+| `StringVar`              | string (percent‑encoded)        | `?name=Branch42`                            |
+| `TextVar`                | string (percent‑encoded)        | `?notes=Initial%20value`                    |
+| `IntegerVar`             | integer                         | `?count=3`                                  |
+| `DecimalVar`             | decimal number                  | `?ratio=0.75`                               |
+| `BooleanVar`             | value → `True`; empty → `False` | `?enabled=true` (True), `?enabled=` (False) |
+| `ChoiceVar`              | choice value (not label)        | `?role=edge`                                |
+| `MultiChoiceVar`         | choice values (repeat)          | `?roles=edge&roles=core`                    |
+| `ObjectVar(Device)`      | PK (integer)                    | `?device=1`                                 |
+| `MultiObjectVar(Device)` | PKs (repeat)                    | `?devices=1&devices=2`                      |
+| `IPAddressVar`           | IP address                      | `?ip=198.51.100.10`                         |
+| `IPAddressWithMaskVar`   | IP address with mask            | `?addr=192.0.2.1/24`                        |
+| `IPNetworkVar`           | IP network prefix               | `?network=2001:db8::/64`                    |
+| `DateVar`                | date `YYYY-MM-DD`               | `?date=2025-01-05`                          |
+| `DateTimeVar`            | ISO datetime                    | `?when=2025-01-05T14:30:00`                 |
+| `FileVar`                | — (not supported)               | —                                           |
+
+!!! note
+    - The parameter names above are examples; use the actual variable attribute names defined by the script.
+    - For `BooleanVar`, only an empty value (`?enabled=`) unchecks the box; any other value including `false` or `0` checks it.
+    - File uploads (`FileVar`) cannot be prefilled via URL parameters.
+
 ### Via the API
 
 To run a script via the REST API, issue a POST request to the script's endpoint specifying the form data and commitment. For example, to run a script named `example.MyReport`, we would make a request such as the following:
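To show how such a prefill URL can be built programmatically, here is a small sketch using only the Python standard library (the host, script ID, and field names are placeholders):

```python
from urllib.parse import urlencode, quote

base = "https://netbox.example.com/extras/scripts/42/"

params = [
    ("name", "Branch 42"),   # StringVar: percent-encoded automatically
    ("count", 3),            # IntegerVar
    ("roles", "edge"),       # MultiChoiceVar: repeat the parameter for multiple values
    ("roles", "core"),
    ("device", 1),           # ObjectVar: pass the object's primary key
]

# quote_via=quote encodes spaces as %20, matching the convention noted above
url = f"{base}?{urlencode(params, quote_via=quote)}"
print(url)
# https://netbox.example.com/extras/scripts/42/?name=Branch%2042&count=3&roles=edge&roles=core&device=1
```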
@@ -7,7 +7,7 @@ Getting started with NetBox development is pretty straightforward, and should fe
 
 * A Linux system or compatible environment
 * A PostgreSQL server, which can be installed locally [per the documentation](../installation/1-postgresql.md)
 * A Redis server, which can also be [installed locally](../installation/2-redis.md)
-* Python 3.12 or later
+* Python 3.10 or later
 
 ### 1. Fork the Repo
@@ -6,10 +6,14 @@ For end‑user guidance on resetting saved table layouts, see [Features > User P
 
 ## Available Preferences
 
-| Name                     | Description                                                    |
-|--------------------------|----------------------------------------------------------------|
-| data_format              | Preferred format when rendering raw data (JSON or YAML)       |
-| pagination.per_page      | The number of items to display per page of a paginated table  |
-| pagination.placement     | Where to display the paginator controls relative to the table |
-| tables.${table}.columns  | The ordered list of columns to display when viewing the table |
-| tables.${table}.ordering | A list of column names by which the table should be ordered   |
+| Name                        | Description                                                    |
+|-----------------------------|----------------------------------------------------------------|
+| `csv_delimiter`             | The delimiting character used when exporting CSV data         |
+| `data_format`               | Preferred format when rendering raw data (JSON or YAML)       |
+| `locale.language`           | The language selected for UI translation                      |
+| `pagination.per_page`       | The number of items to display per page of a paginated table  |
+| `pagination.placement`      | Where to display the paginator controls relative to the table |
+| `tables.${table}.columns`   | The ordered list of columns to display when viewing the table |
+| `tables.${table}.ordering`  | A list of column names by which the table should be ordered   |
+| `ui.copilot_enabled`        | Toggles the NetBox Copilot AI agent                            |
+| `ui.tables.striping`        | Toggles visual striping of tables in the UI                    |
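As a sketch, site-wide defaults for some of these preferences can be seeded through the `DEFAULT_USER_PREFERENCES` configuration parameter (referenced elsewhere in this changeset); the nesting and values below are illustrative:

```python
# configuration.py -- example defaults applied to newly created users
DEFAULT_USER_PREFERENCES = {
    'pagination': {
        'per_page': 100,
    },
    'ui': {
        'tables': {
            'striping': True,
        },
    },
}
```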
@@ -8,7 +8,7 @@ NetBox's REST API, powered by the [Django REST Framework](https://www.django-res
 
 ```no-highlight
 curl -s -X POST \
--H "Authorization: Bearer $TOKEN" \
+-H "Authorization: Token $TOKEN" \
 -H "Content-Type: application/json" \
 http://netbox/api/ipam/prefixes/ \
 --data '{"prefix": "192.0.2.0/24", "site": {"name": "Branch 12"}}'
@@ -90,10 +90,3 @@ http://netbox:8000/api/extras/config-templates/123/render/ \
   "bar": 123
 }'
 ```
-
-!!! note "Permissions"
-    Rendering configuration templates via the REST API requires appropriate permissions for the relevant object type:
-
-    * To render a device's configuration via `/api/dcim/devices/{id}/render-config/`, assign a permission for "DCIM > Device" with the `render_config` action.
-    * To render a virtual machine's configuration via `/api/virtualization/virtual-machines/{id}/render-config/`, assign a permission for "Virtualization > Virtual Machine" with the `render_config` action.
-    * To render a config template directly via `/api/extras/config-templates/{id}/render/`, assign a permission for "Extras > Config Template" with the `render` action.
@@ -34,6 +34,9 @@ Sets the default number of rows displayed on paginated tables.
 
 ### Paginator placement
 Controls where pagination controls are rendered relative to a table.
 
 ### HTMX navigation (experimental)
 Enables partial‑page navigation for supported views. Disable this preference if unexpected behavior is observed.
 
+### Striped table rows
+Toggles alternating row backgrounds on tables.
+
@@ -6,8 +6,8 @@ This section of the documentation discusses installing and configuring the NetBo
 
 Begin by installing all system packages required by NetBox and its dependencies.
 
-!!! warning "Python 3.12 or later required"
-    NetBox supports only Python 3.12 or later.
+!!! warning "Python 3.10 or later required"
+    NetBox supports Python 3.10, 3.11, and 3.12.
 
 ```no-highlight
 sudo apt install -y python3 python3-pip python3-venv python3-dev \
@@ -15,7 +15,7 @@ build-essential libxml2-dev libxslt1-dev libffi-dev libpq-dev \
 libssl-dev zlib1g-dev
 ```
 
-Before continuing, check that your installed Python version is at least 3.12:
+Before continuing, check that your installed Python version is at least 3.10:
 
 ```no-highlight
 python3 -V
@@ -120,23 +120,6 @@ If you are not yet sure what the domain name and/or IP address of the NetBox ins
 ALLOWED_HOSTS = ['*']
 ```
 
-### API_TOKEN_PEPPERS
-
-Define at least one random cryptographic pepper, identified by a numeric ID starting at 1. This will be used to generate SHA256 checksums for API tokens.
-
-```python
-API_TOKEN_PEPPERS = {
-    # DO NOT USE THIS EXAMPLE PEPPER IN PRODUCTION
-    1: 'kp7ht*76fiQAhUi5dHfASLlYUE_S^gI^(7J^K5M!LfoH@vl&b_',
-}
-```
-
-!!! tip
-    As with [`SECRET_KEY`](#secret_key) below, you can use the `generate_secret_key.py` script to generate a random pepper:
-    ```no-highlight
-    python3 ../generate_secret_key.py
-    ```
-
 ### DATABASES
 
 This parameter holds the PostgreSQL database configuration details. The default database must be defined; additional databases may be defined as needed e.g. by plugins.
@@ -252,10 +235,10 @@ Once NetBox has been configured, we're ready to proceed with the actual installa
 sudo /opt/netbox/upgrade.sh
 ```
 
-Note that **Python 3.12 or later is required** for NetBox v4.5 and later releases. If the default Python installation on your server is set to a lesser version, pass the path to the supported installation as an environment variable named `PYTHON`. (Note that the environment variable must be passed _after_ the `sudo` command.)
+Note that **Python 3.10 or later is required** for NetBox v4.0 and later releases. If the default Python installation on your server is set to a lesser version, pass the path to the supported installation as an environment variable named `PYTHON`. (Note that the environment variable must be passed _after_ the `sudo` command.)
 
 ```no-highlight
-sudo PYTHON=/usr/bin/python3.12 /opt/netbox/upgrade.sh
+sudo PYTHON=/usr/bin/python3.10 /opt/netbox/upgrade.sh
 ```
 
 !!! note
@@ -60,3 +60,6 @@ You should see output similar to the following:
 If the NetBox service fails to start, issue the command `journalctl -eu netbox` to check for log messages that may indicate the problem.
 
 Once you've verified that the WSGI workers are up and running, move on to HTTP server setup.
+
+!!! note
+    There is a bug in the current stable release of gunicorn (v21.2.0) where automatic restarts of the worker processes can result in 502 errors under heavy load. (See [gunicorn bug #3038](https://github.com/benoitc/gunicorn/issues/3038) for more detail.) Users who encounter this issue may opt to downgrade to an earlier, unaffected release of gunicorn (`pip install gunicorn==20.1.0`). Note, however, that this earlier release does not officially support Python 3.11.
@@ -121,6 +121,7 @@ AUTH_LDAP_MIRROR_GROUPS = True
 # Define special user types using groups. Exercise great caution when assigning superuser status.
 AUTH_LDAP_USER_FLAGS_BY_GROUP = {
     "is_active": "cn=active,ou=groups,dc=example,dc=com",
+    "is_staff": "cn=staff,ou=groups,dc=example,dc=com",
     "is_superuser": "cn=superuser,ou=groups,dc=example,dc=com"
 }
 
@@ -133,6 +134,7 @@ AUTH_LDAP_CACHE_TIMEOUT = 3600
 ```
 
 * `is_active` - All users must be mapped to at least this group to enable authentication. Without this, users cannot log in.
+* `is_staff` - Users mapped to this group are enabled for access to the administration tools; this is the equivalent of checking the "staff status" box on a manually created user. This doesn't grant any specific permissions.
 * `is_superuser` - Users mapped to this group will be granted superuser status. Superusers are implicitly granted all permissions.
 
 !!! warning
@@ -246,6 +248,7 @@ AUTH_LDAP_MIRROR_GROUPS = True
 # Define special user types using groups. Exercise great caution when assigning superuser status.
 AUTH_LDAP_USER_FLAGS_BY_GROUP = {
     "is_active": "cn=active,ou=groups,dc=example,dc=com",
+    "is_staff": "cn=staff,ou=groups,dc=example,dc=com",
     "is_superuser": "cn=superuser,ou=groups,dc=example,dc=com"
 }
@@ -27,7 +27,7 @@ The following sections detail how to set up a new instance of NetBox:
 
 | Dependency | Supported Versions |
 |------------|--------------------|
-| Python     | 3.12, 3.13, 3.14   |
+| Python     | 3.10, 3.11, 3.12   |
 | PostgreSQL | 14+                |
 | Redis      | 4.0+               |
@@ -19,7 +19,7 @@ NetBox requires the following dependencies:
 
 | Dependency | Supported Versions |
 |------------|--------------------|
-| Python     | 3.12, 3.13, 3.14   |
+| Python     | 3.10, 3.11, 3.12   |
 | PostgreSQL | 14+                |
 | Redis      | 4.0+               |
 
@@ -27,7 +27,6 @@ NetBox requires the following dependencies:
 
 | NetBox Version | Python min | Python max | PostgreSQL min | Redis min | Documentation |
 |:--------------:|:----------:|:----------:|:--------------:|:---------:|:-----------------------------------------------------------------------------------------:|
-| 4.5            | 3.12       | 3.14       | 14             | 4.0       | [Link](https://github.com/netbox-community/netbox/blob/v4.5.0/docs/installation/index.md) |
 | 4.4            | 3.10       | 3.12       | 14             | 4.0       | [Link](https://github.com/netbox-community/netbox/blob/v4.4.0/docs/installation/index.md) |
 | 4.3            | 3.10       | 3.12       | 14             | 4.0       | [Link](https://github.com/netbox-community/netbox/blob/v4.3.0/docs/installation/index.md) |
 | 4.2            | 3.10       | 3.12       | 13             | 4.0       | [Link](https://github.com/netbox-community/netbox/blob/v4.2.0/docs/installation/index.md) |
@@ -131,7 +130,7 @@ sudo ./upgrade.sh
 If the default version of Python is not at least 3.10, you'll need to pass the path to a supported Python version as an environment variable when calling the upgrade script. For example:
 
 ```no-highlight
-sudo PYTHON=/usr/bin/python3.12 ./upgrade.sh
+sudo PYTHON=/usr/bin/python3.10 ./upgrade.sh
 ```
 
 !!! note
@@ -80,7 +80,7 @@ Likewise, the site, rack, and device objects are located under the "DCIM" applic
 
 The full hierarchy of available endpoints can be viewed by navigating to the API root in a web browser.
 
-Each model generally has two views associated with it: a list view and a detail view. The list view is used to retrieve a list of multiple objects and to create new objects. The detail view is used to retrieve, update, or delete a single existing object. All objects are referenced by their numeric primary key (`id`).
+Each model generally has two views associated with it: a list view and a detail view. The list view is used to retrieve a list of multiple objects and to create new objects. The detail view is used to retrieve, update, or delete an single existing object. All objects are referenced by their numeric primary key (`id`).
 
 * `/api/dcim/devices/` - List existing devices or create a new device
 * `/api/dcim/devices/123/` - Retrieve, update, or delete the device with ID 123
@@ -653,22 +653,18 @@ The NetBox REST API primarily employs token-based authentication. For convenienc
 
 ### Tokens
 
-A token is a secret, unique identifier mapped to a NetBox user account. Each user may have one or more tokens which he or she can use for authentication when making REST API requests. To create a token, navigate to the API tokens page under your user profile. When creating a token, NetBox will automatically populate a randomly-generated token value.
-
-!!! note "Tokens cannot be retrieved once created"
-    Once a token has been created, its plaintext value cannot be retrieved. For this reason, you must take care to securely record the token locally immediately upon its creation. If a token plaintext is lost, it cannot be recovered: A new token must be created.
+A token is a unique identifier mapped to a NetBox user account. Each user may have one or more tokens which he or she can use for authentication when making REST API requests. To create a token, navigate to the API tokens page under your user profile.
 
 By default, all users can create and manage their own REST API tokens under the user control panel in the UI or via the REST API. This ability can be disabled by overriding the [`DEFAULT_PERMISSIONS`](../configuration/security.md#default_permissions) configuration parameter.
 
+Each token contains a 160-bit key represented as 40 hexadecimal characters. When creating a token, you'll typically leave the key field blank so that a random key will be automatically generated. However, NetBox allows you to specify a key in case you need to restore a previously deleted token to operation.
+
 Additionally, a token can be set to expire at a specific time. This can be useful if an external client needs to be granted temporary access to NetBox.
 
-#### v1 and v2 Tokens
+!!! info "Restricting Token Retrieval"
+    The ability to retrieve the key value of a previously-created API token can be restricted by disabling the [`ALLOW_TOKEN_RETRIEVAL`](../configuration/security.md#allow_token_retrieval) configuration parameter.
 
-Beginning with NetBox v4.5, two versions of API token are supported, denoted as v1 and v2. Users are strongly encouraged to create only v2 tokens and to discontinue the use of v1 tokens. Support for v1 tokens will be removed in a future NetBox release.
-
-v2 API tokens offer much stronger security. The token plaintext given at creation time is hashed together with a configured [cryptographic pepper](../configuration/required-parameters.md#api_token_peppers) to generate a unique checksum. This checksum is irreversible; the token plaintext is never stored on the server and thus cannot be retrieved even with database-level access.
-
-#### Restricting Write Operations
+### Restricting Write Operations
 
 By default, a token can be used to perform all actions via the API that a user would be permitted to do via the web UI. Deselecting the "write enabled" option will restrict API requests made with the token to read operations (e.g. GET) only.
 
@@ -685,22 +681,10 @@ It is possible to provision authentication tokens for other users via the REST A
 
 ### Authenticating to the API
 
-An authentication token is included with a request in its `Authorization` header. The format of the header value depends on the version of token in use. v2 tokens use the following form, concatenating the token's prefix (`nbt_`) and key with its plaintext value, separated by a period:
-
-```
-Authorization: Bearer nbt_<key>.<token>
-```
-
-Legacy v1 tokens use the prefix `Token` rather than `Bearer`, and include only the token plaintext. (v1 tokens do not have a key.)
+An authentication token is attached to a request by setting the `Authorization` header to the string `Token` followed by a space and the user's token:
 
 ```
 Authorization: Token <token>
 ```
 
-Below is an example REST API request utilizing a v2 token.
-
 ```
-$ curl -H "Authorization: Bearer nbt_4F9DAouzURLb.zjebxBPzICiPbWz0Wtx0fTL7bCKXKGTYhNzkgC2S" \
+$ curl -H "Authorization: Token $TOKEN" \
 -H "Accept: application/json; indent=4" \
 https://netbox/api/dcim/sites/
 {
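For completeness, the same request can be issued from Python; a short sketch using the third-party `requests` package (host and token values are placeholders):

```python
import requests

NETBOX_URL = "https://netbox.example.com"
TOKEN = "0123456789abcdef0123456789abcdef01234567"  # 40-character token key

response = requests.get(
    f"{NETBOX_URL}/api/dcim/sites/",
    headers={
        "Authorization": f"Token {TOKEN}",       # header format shown above
        "Accept": "application/json; indent=4",
    },
)
response.raise_for_status()
for site in response.json()["results"]:          # DRF-style paginated response
    print(site["name"])
```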
@@ -55,6 +55,27 @@ class MyModelViewSet(...):
     filterset_class = filtersets.MyModelFilterSet
 ```
 
+### Implementing Quick Search
+
+The `ObjectListView` has a field called Quick Search. For Quick Search to work the corresponding FilterSet has to override the `search` method that is implemented in `NetBoxModelFilterSet`. This function takes a queryset and can perform arbitrary operations on it and return it. A common use-case is to search for the given search value in multiple fields:
+
+```python
+from django.db.models import Q
+from netbox.filtersets import NetBoxModelFilterSet
+
+class MyFilterSet(NetBoxModelFilterSet):
+    ...
+    def search(self, queryset, name, value):
+        if not value.strip():
+            return queryset
+        return queryset.filter(
+            Q(name__icontains=value) |
+            Q(description__icontains=value)
+        )
+```
+
+The `search` method is also used by the `q` filter in `NetBoxModelFilterSet` which in turn is used by the Search field in the filters tab.
+
 ## Filter Classes
 
 ### TagFilter
@@ -173,12 +173,12 @@ classifiers=[
     'Intended Audience :: Developers',
     'Natural Language :: English',
     "Programming Language :: Python :: 3 :: Only",
+    'Programming Language :: Python :: 3.10',
+    'Programming Language :: Python :: 3.11',
     'Programming Language :: Python :: 3.12',
-    'Programming Language :: Python :: 3.13',
-    'Programming Language :: Python :: 3.14',
 ]
 
-requires-python = ">=3.12.0"
+requires-python = ">=3.10.0"
 
 ```
@@ -195,7 +195,7 @@ python3 -m venv ~/.virtualenvs/my_plugin
 You can make NetBox available within this environment by creating a path file pointing to its location. This will add NetBox to the Python path upon activation. (Be sure to adjust the command below to specify your actual virtual environment path, Python version, and NetBox installation.)
 
 ```shell
-echo /opt/netbox/netbox > $VENV/lib/python3.12/site-packages/netbox.pth
+echo /opt/netbox/netbox > $VENV/lib/python3.10/site-packages/netbox.pth
 ```
 
 ## Development Installation
@@ -64,17 +64,14 @@ item1 = PluginMenuItem(
 
 A `PluginMenuItem` has the following attributes:
 
-| Attribute       | Required | Description                                          |
-|-----------------|----------|------------------------------------------------------|
-| `link`          | Yes      | Name of the URL path to which this menu item links   |
-| `link_text`     | Yes      | The text presented to the user                       |
-| `permissions`   | -        | A list of permissions required to display this link  |
-| `auth_required` | -        | Display only for authenticated users                 |
-| `staff_only`    | -        | Display only for superusers                          |
-| `buttons`       | -        | An iterable of PluginMenuButton instances to include |
-
-!!! note "Changed in NetBox v4.5"
-    In releases prior to NetBox v4.5, `staff_only` restricted display of a menu item to only users with `is_staff` set to True. In NetBox v4.5, the `is_staff` flag was removed from the user model. Menu items with `staff_only` set to True are now displayed only for superusers.
+| Attribute       | Required | Description                                                                                               |
+|-----------------|----------|-----------------------------------------------------------------------------------------------------------|
+| `link`          | Yes      | Name of the URL path to which this menu item links                                                        |
+| `link_text`     | Yes      | The text presented to the user                                                                            |
+| `permissions`   | -        | A list of permissions required to display this link                                                       |
+| `auth_required` | -        | Display only for authenticated users                                                                      |
+| `staff_only`    | -        | Display only for users who have `is_staff` set to true (any specified permissions will also be required) |
+| `buttons`       | -        | An iterable of PluginMenuButton instances to include                                                      |
 
 ## Menu Buttons
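A minimal sketch of a plugin `navigation.py` defining such a menu item (the view name, label, and permission below are placeholders; the import path follows the current plugin documentation):

```python
from netbox.plugins import PluginMenuItem

menu_items = (
    PluginMenuItem(
        link='plugins:my_plugin:widget_list',     # URL name to link to
        link_text='Widgets',                      # text shown in the menu
        permissions=['my_plugin.view_widget'],    # required permissions (optional)
    ),
)
```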
@@ -1,5 +1,35 @@
 # NetBox v4.4
 
+## v4.4.5 (2025-10-28)
+
+### Enhancements
+
+* [#19751](https://github.com/netbox-community/netbox/issues/19751) - Disable occupied module bays in form dropdowns when installing a new module
+* [#20301](https://github.com/netbox-community/netbox/issues/20301) - Add a "dismiss all" option to the notifications dropdown
+* [#20399](https://github.com/netbox-community/netbox/issues/20399) - Add `assigned` and `primary` boolean filters for MAC addresses
+* [#20567](https://github.com/netbox-community/netbox/issues/20567) - Add contacts column to services table
+* [#20675](https://github.com/netbox-community/netbox/issues/20675) - Enable [NetBox Copilot](https://netboxlabs.com/products/netbox-copilot/) integration
+* [#20692](https://github.com/netbox-community/netbox/issues/20692) - Add contacts column to IP addresses table
+* [#20700](https://github.com/netbox-community/netbox/issues/20700) - Add contacts table column for various additional models
+
+### Bug Fixes
+
+* [#19872](https://github.com/netbox-community/netbox/issues/19872) - Ensure custom script validation failures display error messages
+* [#20389](https://github.com/netbox-community/netbox/issues/20389) - Fix "select all" behavior for bulk rename views
+* [#20422](https://github.com/netbox-community/netbox/issues/20422) - Enable filtering of aggregates and prefixes by family in GraphQL API
+* [#20459](https://github.com/netbox-community/netbox/issues/20459) - Fix validation of `is_oob` & `is_primary` fields under IP address bulk import
+* [#20466](https://github.com/netbox-community/netbox/issues/20466) - Fix querying of devices with a primary IP assigned in GraphQL API
+* [#20498](https://github.com/netbox-community/netbox/issues/20498) - Enforce the validation regex (if set) for custom URL fields
+* [#20524](https://github.com/netbox-community/netbox/issues/20524) - Raise a validation error when attempting to schedule a custom script for a past date/time
+* [#20541](https://github.com/netbox-community/netbox/issues/20541) - Fix resolution of GraphQL object fields which rely on custom filters
+* [#20551](https://github.com/netbox-community/netbox/issues/20551) - Fix automatic slug generation in quick-add UI form
+* [#20606](https://github.com/netbox-community/netbox/issues/20606) - Enable copying of values from table columns rendered as badges
+* [#20641](https://github.com/netbox-community/netbox/issues/20641) - Fix `AttributeError` exception raised by the object changes REST API endpoint
+* [#20646](https://github.com/netbox-community/netbox/issues/20646) - Prevent cables from connecting to objects marked as connected
+* [#20655](https://github.com/netbox-community/netbox/issues/20655) - Fix `FieldError` exception when attempting to sort permissions list by actions
+
+---
+
 ## v4.4.4 (2025-10-15)
 
 ### Bug Fixes
netbox/account/tables.py (new file, 57 lines)

@@ -0,0 +1,57 @@
+from django.utils.translation import gettext as _
+
+from account.models import UserToken
+from netbox.tables import NetBoxTable, columns
+
+__all__ = (
+    'UserTokenTable',
+)
+
+
+TOKEN = """<samp><span id="token_{{ record.pk }}">{{ record }}</span></samp>"""
+
+ALLOWED_IPS = """{{ value|join:", " }}"""
+
+COPY_BUTTON = """
+{% if settings.ALLOW_TOKEN_RETRIEVAL %}
+  {% copy_content record.pk prefix="token_" color="success" %}
+{% endif %}
+"""
+
+
+class UserTokenTable(NetBoxTable):
+    """
+    Table for users to manager their own API tokens under account views.
+    """
+    key = columns.TemplateColumn(
+        verbose_name=_('Key'),
+        template_code=TOKEN,
+    )
+    write_enabled = columns.BooleanColumn(
+        verbose_name=_('Write Enabled')
+    )
+    created = columns.DateTimeColumn(
+        timespec='minutes',
+        verbose_name=_('Created'),
+    )
+    expires = columns.DateTimeColumn(
+        timespec='minutes',
+        verbose_name=_('Expires'),
+    )
+    last_used = columns.DateTimeColumn(
+        verbose_name=_('Last Used'),
+    )
+    allowed_ips = columns.TemplateColumn(
+        verbose_name=_('Allowed IPs'),
+        template_code=ALLOWED_IPS
+    )
+    actions = columns.ActionsColumn(
+        actions=('edit', 'delete'),
+        extra_buttons=COPY_BUTTON
+    )
+
+    class Meta(NetBoxTable.Meta):
+        model = UserToken
+        fields = (
+            'pk', 'id', 'key', 'description', 'write_enabled', 'created', 'expires', 'last_used', 'allowed_ips',
+        )
@@ -26,9 +26,8 @@ from extras.tables import BookmarkTable, NotificationTable, SubscriptionTable
 from netbox.authentication import get_auth_backend_display, get_saml_idps
 from netbox.config import get_config
 from netbox.views import generic
-from users import forms
+from users import forms, tables
 from users.models import UserConfig
-from users.tables import TokenTable
 from utilities.request import safe_for_redirect
 from utilities.string import remove_linebreaks
 from utilities.views import register_model_view
@@ -329,8 +328,7 @@ class UserTokenListView(LoginRequiredMixin, View):
 
     def get(self, request):
         tokens = UserToken.objects.filter(user=request.user)
-        table = TokenTable(tokens)
-        table.columns.hide('user')
+        table = tables.UserTokenTable(tokens)
         table.configure(request)
 
         return render(request, 'account/token_list.html', {
@@ -345,9 +343,11 @@ class UserTokenView(LoginRequiredMixin, View):
 
     def get(self, request, pk):
         token = get_object_or_404(UserToken.objects.filter(user=request.user), pk=pk)
+        key = token.key if settings.ALLOW_TOKEN_RETRIEVAL else None
 
         return render(request, 'account/token.html', {
             'object': token,
+            'key': key,
         })
@@ -1,97 +0,0 @@
-from django.db import migrations, models
-
-PATTERN_OPS_INDEXES = [
-    'circuits_circuitgroup_name_ec8ac1e5_like',
-    'circuits_circuitgroup_slug_61ca866b_like',
-    'circuits_circuittype_name_8256ea9a_like',
-    'circuits_circuittype_slug_9b4b3cf9_like',
-    'circuits_provider_name_8f2514f5_like',
-    'circuits_provider_slug_c3c0aa10_like',
-    'circuits_virtualcircuittype_name_5184db16_like',
-    'circuits_virtualcircuittype_slug_75d5c661_like',
-]
-
-
-def remove_indexes(apps, schema_editor):
-    for idx in PATTERN_OPS_INDEXES:
-        schema_editor.execute(f'DROP INDEX IF EXISTS {idx}')
-
-
-class Migration(migrations.Migration):
-
-    dependencies = [
-        ('circuits', '0052_extend_circuit_abs_distance_upper_limit'),
-        ('dcim', '0217_ci_collations'),
-    ]
-
-    operations = [
-        migrations.RunPython(
-            code=remove_indexes,
-            reverse_code=migrations.RunPython.noop,
-        ),
-        migrations.AlterField(
-            model_name='circuit',
-            name='cid',
-            field=models.CharField(db_collation='case_insensitive', max_length=100),
-        ),
-        migrations.AlterField(
-            model_name='circuitgroup',
-            name='name',
-            field=models.CharField(db_collation='ci_natural_sort', max_length=100, unique=True),
-        ),
-        migrations.AlterField(
-            model_name='circuitgroup',
-            name='slug',
-            field=models.SlugField(db_collation='case_insensitive', max_length=100, unique=True),
-        ),
-        migrations.AlterField(
-            model_name='circuittype',
-            name='name',
-            field=models.CharField(db_collation='ci_natural_sort', max_length=100, unique=True),
-        ),
-        migrations.AlterField(
-            model_name='circuittype',
-            name='slug',
-            field=models.SlugField(db_collation='case_insensitive', max_length=100, unique=True),
-        ),
-        migrations.AlterField(
-            model_name='provider',
-            name='name',
-            field=models.CharField(db_collation='ci_natural_sort', max_length=100, unique=True),
-        ),
-        migrations.AlterField(
-            model_name='provider',
-            name='slug',
-            field=models.SlugField(db_collation='case_insensitive', max_length=100, unique=True),
-        ),
-        migrations.AlterField(
-            model_name='provideraccount',
-            name='account',
-            field=models.CharField(db_collation='ci_natural_sort', max_length=100),
-        ),
-        migrations.AlterField(
-            model_name='provideraccount',
-            name='name',
-            field=models.CharField(blank=True, db_collation='ci_natural_sort', max_length=100),
-        ),
-        migrations.AlterField(
-            model_name='providernetwork',
-            name='name',
-            field=models.CharField(db_collation='ci_natural_sort', max_length=100),
-        ),
-        migrations.AlterField(
-            model_name='virtualcircuit',
-            name='cid',
-            field=models.CharField(db_collation='case_insensitive', max_length=100),
-        ),
-        migrations.AlterField(
-            model_name='virtualcircuittype',
-            name='name',
-            field=models.CharField(db_collation='ci_natural_sort', max_length=100, unique=True),
-        ),
-        migrations.AlterField(
-            model_name='virtualcircuittype',
-            name='slug',
-            field=models.SlugField(db_collation='case_insensitive', max_length=100, unique=True),
-        ),
-    ]
@@ -41,10 +41,9 @@ class Circuit(ContactsMixin, ImageAttachmentsMixin, DistanceMixin, PrimaryModel)
     ProviderAccount. Circuit port speed and commit rate are measured in Kbps.
     """
     cid = models.CharField(
-        verbose_name=_('circuit ID'),
         max_length=100,
-        db_collation='case_insensitive',
-        help_text=_('Unique circuit ID'),
+        verbose_name=_('circuit ID'),
+        help_text=_('Unique circuit ID')
     )
     provider = models.ForeignKey(
         to='circuits.Provider',
@@ -21,14 +21,13 @@ class Provider(ContactsMixin, PrimaryModel):
         verbose_name=_('name'),
         max_length=100,
         unique=True,
-        db_collation='ci_natural_sort',
         help_text=_('Full name of the provider'),
+        db_collation="natural_sort"
     )
     slug = models.SlugField(
         verbose_name=_('slug'),
         max_length=100,
-        unique=True,
-        db_collation='case_insensitive',
+        unique=True
     )
     asns = models.ManyToManyField(
         to='ipam.ASN',
@@ -57,15 +56,13 @@ class ProviderAccount(ContactsMixin, PrimaryModel):
         related_name='accounts'
     )
     account = models.CharField(
-        verbose_name=_('account ID'),
         max_length=100,
-        db_collation='ci_natural_sort',
+        verbose_name=_('account ID')
     )
     name = models.CharField(
         verbose_name=_('name'),
         max_length=100,
-        db_collation='ci_natural_sort',
-        blank=True,
+        blank=True
     )
 
     clone_fields = ('provider', )
@@ -100,7 +97,7 @@ class ProviderNetwork(PrimaryModel):
     name = models.CharField(
         verbose_name=_('name'),
         max_length=100,
-        db_collation='ci_natural_sort',
+        db_collation="natural_sort"
     )
     provider = models.ForeignKey(
         to='circuits.Provider',
@@ -34,10 +34,9 @@ class VirtualCircuit(PrimaryModel):
     A virtual connection between two or more endpoints, delivered across one or more physical circuits.
     """
    cid = models.CharField(
-        verbose_name=_('circuit ID'),
         max_length=100,
-        db_collation='case_insensitive',
-        help_text=_('Unique circuit ID'),
+        verbose_name=_('circuit ID'),
+        help_text=_('Unique circuit ID')
     )
     provider_network = models.ForeignKey(
         to='circuits.ProviderNetwork',
@@ -83,6 +83,7 @@ class ProviderBulkEditView(generic.BulkEditView):
 @register_model_view(Provider, 'bulk_rename', path='rename', detail=False)
 class ProviderBulkRenameView(generic.BulkRenameView):
     queryset = Provider.objects.all()
+    filterset = filtersets.ProviderFilterSet
 
 
 @register_model_view(Provider, 'bulk_delete', path='delete', detail=False)
@@ -150,6 +151,7 @@ class ProviderAccountBulkEditView(generic.BulkEditView):
 @register_model_view(ProviderAccount, 'bulk_rename', path='rename', detail=False)
 class ProviderAccountBulkRenameView(generic.BulkRenameView):
     queryset = ProviderAccount.objects.all()
+    filterset = filtersets.ProviderAccountFilterSet
 
 
 @register_model_view(ProviderAccount, 'bulk_delete', path='delete', detail=False)
@@ -226,6 +228,7 @@ class ProviderNetworkBulkEditView(generic.BulkEditView):
 @register_model_view(ProviderNetwork, 'bulk_rename', path='rename', detail=False)
 class ProviderNetworkBulkRenameView(generic.BulkRenameView):
     queryset = ProviderNetwork.objects.all()
+    filterset = filtersets.ProviderNetworkFilterSet
 
 
 @register_model_view(ProviderNetwork, 'bulk_delete', path='delete', detail=False)
@@ -290,6 +293,7 @@ class CircuitTypeBulkEditView(generic.BulkEditView):
 @register_model_view(CircuitType, 'bulk_rename', path='rename', detail=False)
 class CircuitTypeBulkRenameView(generic.BulkRenameView):
     queryset = CircuitType.objects.all()
+    filterset = filtersets.CircuitTypeFilterSet
 
 
 @register_model_view(CircuitType, 'bulk_delete', path='delete', detail=False)
@@ -362,6 +366,7 @@ class CircuitBulkEditView(generic.BulkEditView):
 class CircuitBulkRenameView(generic.BulkRenameView):
     queryset = Circuit.objects.all()
     field_name = 'cid'
+    filterset = filtersets.CircuitFilterSet
 
 
 @register_model_view(Circuit, 'bulk_delete', path='delete', detail=False)
@@ -557,6 +562,7 @@ class CircuitGroupBulkEditView(generic.BulkEditView):
 @register_model_view(CircuitGroup, 'bulk_rename', path='rename', detail=False)
 class CircuitGroupBulkRenameView(generic.BulkRenameView):
     queryset = CircuitGroup.objects.all()
+    filterset = filtersets.CircuitGroupFilterSet
 
 
 @register_model_view(CircuitGroup, 'bulk_delete', path='delete', detail=False)
@@ -672,6 +678,7 @@ class VirtualCircuitTypeBulkEditView(generic.BulkEditView):
 @register_model_view(VirtualCircuitType, 'bulk_rename', path='rename', detail=False)
 class VirtualCircuitTypeBulkRenameView(generic.BulkRenameView):
     queryset = VirtualCircuitType.objects.all()
+    filterset = filtersets.VirtualCircuitTypeFilterSet
 
 
 @register_model_view(VirtualCircuitType, 'bulk_delete', path='delete', detail=False)
@@ -744,6 +751,7 @@ class VirtualCircuitBulkEditView(generic.BulkEditView):
 class VirtualCircuitBulkRenameView(generic.BulkRenameView):
     queryset = VirtualCircuit.objects.all()
     field_name = 'cid'
+    filterset = filtersets.VirtualCircuitFilterSet
 
 
 @register_model_view(VirtualCircuit, 'bulk_delete', path='delete', detail=False)
@@ -9,6 +9,7 @@ from drf_spectacular.utils import OpenApiParameter, extend_schema
 from rest_framework import viewsets
 from rest_framework.decorators import action
 from rest_framework.exceptions import PermissionDenied
+from rest_framework.permissions import IsAdminUser
 from rest_framework.response import Response
 from rest_framework.routers import APIRootView
 from rest_framework.viewsets import ReadOnlyModelViewSet
@@ -23,7 +24,7 @@ from netbox.api.authentication import IsAuthenticatedOrLoginNotRequired
 from netbox.api.metadata import ContentTypeMetadata
 from netbox.api.pagination import LimitOffsetListPagination
 from netbox.api.viewsets import NetBoxModelViewSet, NetBoxReadOnlyModelViewSet
-from utilities.api import IsSuperuser
 
 from . import serializers
@@ -99,7 +100,7 @@ class BaseRQViewSet(viewsets.ViewSet):
     """
     Base class for RQ view sets. Provides a list() method. Subclasses must implement get_data().
     """
-    permission_classes = [IsSuperuser]
+    permission_classes = [IsAdminUser]
     serializer_class = None
 
     def get_data(self):
@@ -166,8 +166,8 @@ class ConfigRevisionForm(forms.ModelForm, metaclass=ConfigFormMetaclass):
         FieldSet('CUSTOM_VALIDATORS', 'PROTECTION_RULES', name=_('Validation')),
         FieldSet('DEFAULT_USER_PREFERENCES', name=_('User Preferences')),
         FieldSet(
-            'MAINTENANCE_MODE', 'GRAPHQL_ENABLED', 'CHANGELOG_RETENTION', 'JOB_RETENTION', 'MAPS_URL',
-            name=_('Miscellaneous')
+            'MAINTENANCE_MODE', 'COPILOT_ENABLED', 'GRAPHQL_ENABLED', 'CHANGELOG_RETENTION', 'JOB_RETENTION',
+            'MAPS_URL', name=_('Miscellaneous'),
         ),
         FieldSet('comment', name=_('Config Revision'))
     )
@@ -1,30 +0,0 @@
-from django.db import migrations, models
-
-PATTERN_OPS_INDEXES = [
-    'core_datasource_name_17788499_like',
-]
-
-
-def remove_indexes(apps, schema_editor):
-    for idx in PATTERN_OPS_INDEXES:
-        schema_editor.execute(f'DROP INDEX IF EXISTS {idx}')
-
-
-class Migration(migrations.Migration):
-
-    dependencies = [
-        ('core', '0019_configrevision_active'),
-        ('dcim', '0217_ci_collations'),
-    ]
-
-    operations = [
-        migrations.RunPython(
-            code=remove_indexes,
-            reverse_code=migrations.RunPython.noop,
-        ),
-        migrations.AlterField(
-            model_name='datasource',
-            name='name',
-            field=models.CharField(db_collation='ci_natural_sort', max_length=100, unique=True),
-        ),
-    ]
netbox/core/models/contenttypes.py (new file, 3 lines)

@@ -0,0 +1,3 @@
+# TODO: Remove this module in NetBox v4.5
+# Provided for backward compatibility
+from .object_types import *
@@ -38,8 +38,7 @@ class DataSource(JobsMixin, PrimaryModel):
     name = models.CharField(
         verbose_name=_('name'),
         max_length=100,
-        unique=True,
-        db_collation='ci_natural_sort',
+        unique=True
     )
     type = models.CharField(
         verbose_name=_('type'),
@@ -8,7 +8,6 @@ from rq.job import Job as RQ_Job, JobStatus
 from rq.registry import FailedJobRegistry, StartedJobRegistry
 
 from rest_framework import status
-from users.constants import TOKEN_PREFIX
 from users.models import Token, User
 from utilities.testing import APITestCase, APIViewTestCases, TestCase
 from utilities.testing.utils import disable_logging
@@ -108,14 +107,14 @@ class ObjectTypeTest(APITestCase):
     def test_list_objects(self):
         object_type_count = ObjectType.objects.count()
 
-        response = self.client.get(reverse('core-api:objecttype-list'), **self.header)
+        response = self.client.get(reverse('extras-api:objecttype-list'), **self.header)
         self.assertHttpStatus(response, status.HTTP_200_OK)
         self.assertEqual(response.data['count'], object_type_count)
 
     def test_get_object(self):
         object_type = ObjectType.objects.first()
 
-        url = reverse('core-api:objecttype-detail', kwargs={'pk': object_type.pk})
+        url = reverse('extras-api:objecttype-detail', kwargs={'pk': object_type.pk})
         self.assertHttpStatus(self.client.get(url, **self.header), status.HTTP_200_OK)
 
@@ -135,9 +134,12 @@ class BackgroundTaskTestCase(TestCase):
         Create a user and token for API calls.
         """
         # Create the test user and assign permissions
-        self.user = User.objects.create_user(username='testuser', is_active=True)
+        self.user = User.objects.create_user(username='testuser')
+        self.user.is_staff = True
+        self.user.is_active = True
+        self.user.save()
         self.token = Token.objects.create(user=self.user)
-        self.header = {'HTTP_AUTHORIZATION': f'Bearer {TOKEN_PREFIX}{self.token.key}.{self.token.token}'}
+        self.header = {'HTTP_AUTHORIZATION': f'Token {self.token.key}'}
 
         # Clear all queues prior to running each test
         get_queue('default').connection.flushall()
@@ -148,11 +150,13 @@
         url = reverse('core-api:rqqueue-list')
 
         # Attempt to load view without permission
+        self.user.is_staff = False
+        self.user.save()
         response = self.client.get(url, **self.header)
         self.assertEqual(response.status_code, 403)
 
         # Load view with permission
-        self.user.is_superuser = True
+        self.user.is_staff = True
         self.user.save()
         response = self.client.get(url, **self.header)
         self.assertEqual(response.status_code, 200)
@@ -161,16 +165,7 @@
         self.assertIn('low', str(response.content))
 
     def test_background_queue(self):
-        url = reverse('core-api:rqqueue-detail', args=['default'])
-
-        # Attempt to load view without permission
-        response = self.client.get(url, **self.header)
-        self.assertEqual(response.status_code, 403)
-
-        # Load view with permission
-        self.user.is_superuser = True
-        self.user.save()
-        response = self.client.get(url, **self.header)
+        response = self.client.get(reverse('core-api:rqqueue-detail', args=['default']), **self.header)
         self.assertEqual(response.status_code, 200)
         self.assertIn('default', str(response.content))
         self.assertIn('oldest_job_timestamp', str(response.content))
@@ -179,16 +174,8 @@
     def test_background_task_list(self):
         queue = get_queue('default')
         queue.enqueue(self.dummy_job_default)
-        url = reverse('core-api:rqtask-list')
-
-        # Attempt to load view without permission
-        response = self.client.get(url, **self.header)
-        self.assertEqual(response.status_code, 403)
-
-        # Load view with permission
-        self.user.is_superuser = True
-        self.user.save()
-        response = self.client.get(url, **self.header)
+        response = self.client.get(reverse('core-api:rqtask-list'), **self.header)
         self.assertEqual(response.status_code, 200)
         self.assertIn('origin', str(response.content))
         self.assertIn('core.tests.test_api.BackgroundTaskTestCase.dummy_job_default()', str(response.content))
@@ -196,16 +183,8 @@
     def test_background_task(self):
         queue = get_queue('default')
         job = queue.enqueue(self.dummy_job_default)
-        url = reverse('core-api:rqtask-detail', args=[job.id])
-
-        # Attempt to load view without permission
-        response = self.client.get(url, **self.header)
-        self.assertEqual(response.status_code, 403)
-
-        # Load view with permission
-        self.user.is_superuser = True
-        self.user.save()
-        response = self.client.get(url, **self.header)
+        response = self.client.get(reverse('core-api:rqtask-detail', args=[job.id]), **self.header)
         self.assertEqual(response.status_code, 200)
         self.assertIn(str(job.id), str(response.content))
         self.assertIn('origin', str(response.content))
@@ -215,65 +194,45 @@
     def test_background_task_delete(self):
         queue = get_queue('default')
         job = queue.enqueue(self.dummy_job_default)
-        url = reverse('core-api:rqtask-delete', args=[job.id])
-
-        # Attempt to load view without permission
-        response = self.client.get(url, **self.header)
-        self.assertEqual(response.status_code, 403)
-
-        # Load view with permission
-        self.user.is_superuser = True
-        self.user.save()
-        response = self.client.post(url, **self.header)
+        response = self.client.post(reverse('core-api:rqtask-delete', args=[job.id]), **self.header)
         self.assertEqual(response.status_code, 200)
         self.assertFalse(RQ_Job.exists(job.id, connection=queue.connection))
         queue = get_queue('default')
         self.assertNotIn(job.id, queue.job_ids)
 
     def test_background_task_requeue(self):
+        # Enqueue & run a job that will fail
         queue = get_queue('default')
-
-        # Enqueue & run a job that will fail
         job = queue.enqueue(self.dummy_job_failing)
         worker = get_worker('default')
         with disable_logging():
             worker.work(burst=True)
         self.assertTrue(job.is_failed)
-        url = reverse('core-api:rqtask-requeue', args=[job.id])
-
-        # Attempt to requeue the job without permission
-        response = self.client.post(url, **self.header)
-        self.assertEqual(response.status_code, 403)
 
         # Re-enqueue the failed job and check that its status has been reset
-        self.user.is_superuser = True
-        self.user.save()
-        response = self.client.post(url, **self.header)
+        response = self.client.post(reverse('core-api:rqtask-requeue', args=[job.id]), **self.header)
         self.assertEqual(response.status_code, 200)
         job = RQ_Job.fetch(job.id, queue.connection)
         self.assertFalse(job.is_failed)
 
     def test_background_task_enqueue(self):
         # Enqueue some jobs that each depends on its predecessor
||||
queue = get_queue('default')
|
||||
|
||||
# Enqueue some jobs that each depends on its predecessor
|
||||
job = previous_job = None
|
||||
for _ in range(0, 3):
|
||||
job = queue.enqueue(self.dummy_job_default, depends_on=previous_job)
|
||||
previous_job = job
|
||||
url = reverse('core-api:rqtask-enqueue', args=[job.id])
|
||||
|
||||
# Check that the last job to be enqueued has a status of deferred
|
||||
self.assertIsNotNone(job)
|
||||
self.assertEqual(job.get_status(), JobStatus.DEFERRED)
|
||||
self.assertIsNone(job.enqueued_at)
|
||||
|
||||
# Attempt to force-enqueue the job without permission
|
||||
response = self.client.post(url, **self.header)
|
||||
self.assertEqual(response.status_code, 403)
|
||||
|
||||
# Force-enqueue the deferred job
|
||||
self.user.is_superuser = True
|
||||
self.user.save()
|
||||
response = self.client.post(url, **self.header)
|
||||
response = self.client.post(reverse('core-api:rqtask-enqueue', args=[job.id]), **self.header)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
|
||||
# Check that job's status is updated correctly
|
||||
@@ -283,27 +242,19 @@ class BackgroundTaskTestCase(TestCase):
|
||||
|
||||
def test_background_task_stop(self):
|
||||
queue = get_queue('default')
|
||||
|
||||
worker = get_worker('default')
|
||||
job = queue.enqueue(self.dummy_job_default)
|
||||
worker.prepare_job_execution(job)
|
||||
url = reverse('core-api:rqtask-stop', args=[job.id])
|
||||
|
||||
self.assertEqual(job.get_status(), JobStatus.STARTED)
|
||||
|
||||
# Attempt to stop the task without permission
|
||||
response = self.client.post(url, **self.header)
|
||||
self.assertEqual(response.status_code, 403)
|
||||
|
||||
# Stop the task
|
||||
self.user.is_superuser = True
|
||||
self.user.save()
|
||||
response = self.client.post(url, **self.header)
|
||||
response = self.client.post(reverse('core-api:rqtask-stop', args=[job.id]), **self.header)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
with disable_logging():
|
||||
worker.monitor_work_horse(job, queue) # Sets the job as Failed and removes from Started
|
||||
started_job_registry = StartedJobRegistry(queue.name, connection=queue.connection)
|
||||
self.assertEqual(len(started_job_registry), 0)
|
||||
|
||||
# Verify that the task was cancelled
|
||||
canceled_job_registry = FailedJobRegistry(queue.name, connection=queue.connection)
|
||||
self.assertEqual(len(canceled_job_registry), 1)
|
||||
self.assertIn(job.id, canceled_job_registry)
|
||||
@@ -311,34 +262,19 @@ class BackgroundTaskTestCase(TestCase):
|
||||
def test_worker_list(self):
|
||||
worker1 = get_worker('default', name=uuid.uuid4().hex)
|
||||
worker1.register_birth()
|
||||
|
||||
worker2 = get_worker('high')
|
||||
worker2.register_birth()
|
||||
url = reverse('core-api:rqworker-list')
|
||||
|
||||
# Attempt to fetch the worker list without permission
|
||||
response = self.client.get(url, **self.header)
|
||||
self.assertEqual(response.status_code, 403)
|
||||
|
||||
# Fetch the worker list
|
||||
self.user.is_superuser = True
|
||||
self.user.save()
|
||||
response = self.client.get(url, **self.header)
|
||||
response = self.client.get(reverse('core-api:rqworker-list'), **self.header)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertIn(str(worker1.name), str(response.content))
|
||||
|
||||
def test_worker(self):
|
||||
worker1 = get_worker('default', name=uuid.uuid4().hex)
|
||||
worker1.register_birth()
|
||||
url = reverse('core-api:rqworker-detail', args=[worker1.name])
|
||||
|
||||
# Attempt to fetch a worker without permission
|
||||
response = self.client.get(url, **self.header)
|
||||
self.assertEqual(response.status_code, 403)
|
||||
|
||||
# Fetch the worker
|
||||
self.user.is_superuser = True
|
||||
self.user.save()
|
||||
response = self.client.get(url, **self.header)
|
||||
response = self.client.get(reverse('core-api:rqworker-detail', args=[worker1.name]), **self.header)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertIn(str(worker1.name), str(response.content))
|
||||
self.assertIn('birth_date', str(response.content))
|
||||
|
||||
@@ -158,7 +158,7 @@ class BackgroundTaskTestCase(TestCase):

def setUp(self):
super().setUp()
self.user.is_superuser = True
self.user.is_staff = True
self.user.is_active = True
self.user.save()

@@ -171,13 +171,13 @@ class BackgroundTaskTestCase(TestCase):
url = reverse('core:background_queue_list')

# Attempt to load view without permission
self.user.is_superuser = False
self.user.is_staff = False
self.user.save()
response = self.client.get(url)
self.assertEqual(response.status_code, 403)

# Load view with permission
self.user.is_superuser = True
self.user.is_staff = True
self.user.save()
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
@@ -356,7 +356,7 @@ class SystemTestCase(TestCase):
def setUp(self):
super().setUp()

self.user.is_superuser = True
self.user.is_staff = True
self.user.save()

def test_system_view_default(self):
@@ -125,6 +125,7 @@ class DataSourceBulkEditView(generic.BulkEditView):
@register_model_view(DataSource, 'bulk_rename', path='rename', detail=False)
class DataSourceBulkRenameView(generic.BulkRenameView):
queryset = DataSource.objects.all()
filterset = filtersets.DataSourceFilterSet


@register_model_view(DataSource, 'bulk_delete', path='delete', detail=False)
@@ -372,7 +373,7 @@ class ConfigRevisionRestoreView(ContentTypePermissionRequiredMixin, View):
class BaseRQView(UserPassesTestMixin, View):

def test_func(self):
return self.request.user.is_superuser
return self.request.user.is_staff


class BackgroundQueueListView(TableMixin, BaseRQView):
@@ -555,7 +556,7 @@ class WorkerView(BaseRQView):
class SystemView(UserPassesTestMixin, View):

def test_func(self):
return self.request.user.is_superuser
return self.request.user.is_staff

def get(self, request):

@@ -638,7 +639,7 @@ class BasePluginView(UserPassesTestMixin, View):
CACHE_KEY_CATALOG_ERROR = 'plugins-catalog-error'

def test_func(self):
return self.request.user.is_superuser
return self.request.user.is_staff

def get_cached_plugins(self, request):
catalog_plugins = {}
@@ -1,8 +1,10 @@
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from drf_spectacular.types import OpenApiTypes
|
||||
from drf_spectacular.utils import extend_schema_field
|
||||
from rest_framework import serializers
|
||||
|
||||
from dcim.choices import *
|
||||
from dcim.constants import *
|
||||
from dcim.models import Cable, CablePath, CableTermination
|
||||
from netbox.api.fields import ChoiceField, ContentTypeField
|
||||
from netbox.api.serializers import BaseModelSerializer, GenericObjectSerializer, NetBoxModelSerializer
|
||||
@@ -49,11 +51,9 @@ class TracedCableSerializer(BaseModelSerializer):
|
||||
|
||||
class CableTerminationSerializer(NetBoxModelSerializer):
|
||||
termination_type = ContentTypeField(
|
||||
read_only=True,
|
||||
)
|
||||
termination = serializers.SerializerMethodField(
|
||||
read_only=True,
|
||||
queryset=ContentType.objects.filter(CABLE_TERMINATION_MODELS)
|
||||
)
|
||||
termination = serializers.SerializerMethodField(read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = CableTermination
|
||||
@@ -61,8 +61,6 @@ class CableTerminationSerializer(NetBoxModelSerializer):
|
||||
'id', 'url', 'display', 'cable', 'cable_end', 'termination_type', 'termination_id',
|
||||
'termination', 'created', 'last_updated',
|
||||
]
|
||||
read_only_fields = fields
|
||||
brief_fields = ('id', 'url', 'display', 'cable', 'cable_end', 'termination_type', 'termination_id')
|
||||
|
||||
@extend_schema_field(serializers.JSONField(allow_null=True))
|
||||
def get_termination(self, obj):
|
||||
|
||||
@@ -155,7 +155,7 @@ class PowerOutletTemplateSerializer(ComponentTemplateSerializer):
|
||||
model = PowerOutletTemplate
|
||||
fields = [
|
||||
'id', 'url', 'display', 'device_type', 'module_type', 'name', 'label', 'type',
|
||||
'color', 'power_port', 'feed_leg', 'description', 'created', 'last_updated',
|
||||
'power_port', 'feed_leg', 'description', 'created', 'last_updated',
|
||||
]
|
||||
brief_fields = ('id', 'url', 'display', 'name', 'description')
|
||||
|
||||
|
||||
@@ -16,7 +16,7 @@ from extras.api.mixins import ConfigContextQuerySetMixin, RenderConfigMixin
|
||||
from netbox.api.authentication import IsAuthenticatedOrLoginNotRequired
|
||||
from netbox.api.metadata import ContentTypeMetadata
|
||||
from netbox.api.pagination import StripCountAnnotationsPaginator
|
||||
from netbox.api.viewsets import NetBoxModelViewSet, MPTTLockedMixin, NetBoxReadOnlyModelViewSet
|
||||
from netbox.api.viewsets import NetBoxModelViewSet, MPTTLockedMixin
|
||||
from netbox.api.viewsets.mixins import SequentialBulkCreatesMixin
|
||||
from utilities.api import get_serializer_for_model
|
||||
from utilities.query_functions import CollateAsChar
|
||||
@@ -563,7 +563,7 @@ class CableViewSet(NetBoxModelViewSet):
|
||||
filterset_class = filtersets.CableFilterSet
|
||||
|
||||
|
||||
class CableTerminationViewSet(NetBoxReadOnlyModelViewSet):
|
||||
class CableTerminationViewSet(NetBoxModelViewSet):
|
||||
metadata_class = ContentTypeMetadata
|
||||
queryset = CableTermination.objects.all()
|
||||
serializer_class = serializers.CableTerminationSerializer
|
||||
|
||||
@@ -14,16 +14,16 @@ from netbox.filtersets import (
|
||||
AttributeFiltersMixin, BaseFilterSet, ChangeLoggedModelFilterSet, NestedGroupModelFilterSet, NetBoxModelFilterSet,
|
||||
OrganizationalModelFilterSet,
|
||||
)
|
||||
from tenancy.filtersets import TenancyFilterSet, ContactModelFilterSet
|
||||
from tenancy.filtersets import ContactModelFilterSet, TenancyFilterSet
|
||||
from tenancy.models import *
|
||||
from users.models import User
|
||||
from utilities.filters import (
|
||||
ContentTypeFilter, MultiValueCharFilter, MultiValueMACAddressFilter, MultiValueNumberFilter, MultiValueWWNFilter,
|
||||
NumericArrayFilter, TreeNodeMultipleChoiceFilter,
|
||||
)
|
||||
from virtualization.models import Cluster, ClusterGroup, VMInterface, VirtualMachine
|
||||
from virtualization.models import Cluster, ClusterGroup, VirtualMachine, VMInterface
|
||||
from vpn.models import L2VPN
|
||||
from wireless.choices import WirelessRoleChoices, WirelessChannelChoices
|
||||
from wireless.choices import WirelessChannelChoices, WirelessRoleChoices
|
||||
from wireless.models import WirelessLAN, WirelessLink
|
||||
from .choices import *
|
||||
from .constants import *
|
||||
@@ -842,7 +842,7 @@ class PowerOutletTemplateFilterSet(ChangeLoggedModelFilterSet, ModularDeviceType
|
||||
|
||||
class Meta:
|
||||
model = PowerOutletTemplate
|
||||
fields = ('id', 'name', 'label', 'type', 'color', 'feed_leg', 'description')
|
||||
fields = ('id', 'name', 'label', 'type', 'feed_leg', 'description')
|
||||
|
||||
|
||||
class InterfaceTemplateFilterSet(ChangeLoggedModelFilterSet, ModularDeviceTypeComponentFilterSet):
|
||||
@@ -1288,7 +1288,6 @@ class DeviceFilterSet(
|
||||
Q(name__icontains=value) |
|
||||
Q(virtual_chassis__name__icontains=value) |
|
||||
Q(serial__icontains=value.strip()) |
|
||||
Q(inventoryitems__serial__icontains=value.strip()) |
|
||||
Q(asset_tag__icontains=value.strip()) |
|
||||
Q(description__icontains=value.strip()) |
|
||||
Q(comments__icontains=value) |
|
||||
@@ -1807,6 +1806,14 @@ class MACAddressFilterSet(NetBoxModelFilterSet):
|
||||
queryset=VMInterface.objects.all(),
|
||||
label=_('VM interface (ID)'),
|
||||
)
|
||||
assigned = django_filters.BooleanFilter(
|
||||
method='filter_assigned',
|
||||
label=_('Is assigned'),
|
||||
)
|
||||
primary = django_filters.BooleanFilter(
|
||||
method='filter_primary',
|
||||
label=_('Is primary'),
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = MACAddress
|
||||
@@ -1843,6 +1850,29 @@ class MACAddressFilterSet(NetBoxModelFilterSet):
|
||||
vminterface__in=interface_ids
|
||||
)
|
||||
|
||||
def filter_assigned(self, queryset, name, value):
|
||||
params = {
|
||||
'assigned_object_type__isnull': True,
|
||||
'assigned_object_id__isnull': True,
|
||||
}
|
||||
if value:
|
||||
return queryset.exclude(**params)
|
||||
else:
|
||||
return queryset.filter(**params)
|
||||
|
||||
def filter_primary(self, queryset, name, value):
|
||||
interface_mac_ids = Interface.objects.filter(primary_mac_address_id__isnull=False).values_list(
|
||||
'primary_mac_address_id', flat=True
|
||||
)
|
||||
vminterface_mac_ids = VMInterface.objects.filter(primary_mac_address_id__isnull=False).values_list(
|
||||
'primary_mac_address_id', flat=True
|
||||
)
|
||||
query = Q(pk__in=interface_mac_ids) | Q(pk__in=vminterface_mac_ids)
|
||||
if value:
|
||||
return queryset.filter(query)
|
||||
else:
|
||||
return queryset.exclude(query)
|
||||
|
||||
|
||||
class CommonInterfaceFilterSet(django_filters.FilterSet):
|
||||
mode = django_filters.MultipleChoiceFilter(
|
||||
|
||||
@@ -1163,10 +1163,6 @@ class PowerOutletTemplateBulkEditForm(ComponentTemplateBulkEditForm):
|
||||
choices=add_blank_choice(PowerOutletTypeChoices),
|
||||
required=False
|
||||
)
|
||||
color = ColorField(
|
||||
label=_('Color'),
|
||||
required=False
|
||||
)
|
||||
power_port = forms.ModelChoiceField(
|
||||
label=_('Power port'),
|
||||
queryset=PowerPortTemplate.objects.all(),
|
||||
|
||||
@@ -1676,12 +1676,16 @@ class MACAddressFilterForm(NetBoxModelFilterSetForm):
|
||||
model = MACAddress
|
||||
fieldsets = (
|
||||
FieldSet('q', 'filter_id', 'tag'),
|
||||
FieldSet('mac_address', 'device_id', 'virtual_machine_id', name=_('MAC address')),
|
||||
FieldSet('mac_address', name=_('Attributes')),
|
||||
FieldSet(
|
||||
'device_id', 'virtual_machine_id', 'assigned', 'primary',
|
||||
name=_('Assignments'),
|
||||
),
|
||||
)
|
||||
selector_fields = ('filter_id', 'q', 'device_id', 'virtual_machine_id')
|
||||
mac_address = forms.CharField(
|
||||
required=False,
|
||||
label=_('MAC address')
|
||||
label=_('MAC address'),
|
||||
)
|
||||
device_id = DynamicModelMultipleChoiceField(
|
||||
queryset=Device.objects.all(),
|
||||
@@ -1693,6 +1697,20 @@ class MACAddressFilterForm(NetBoxModelFilterSetForm):
|
||||
required=False,
|
||||
label=_('Assigned VM'),
|
||||
)
|
||||
assigned = forms.NullBooleanField(
|
||||
required=False,
|
||||
label=_('Assigned to an interface'),
|
||||
widget=forms.Select(
|
||||
choices=BOOLEAN_WITH_BLANK_CHOICES
|
||||
),
|
||||
)
|
||||
primary = forms.NullBooleanField(
|
||||
required=False,
|
||||
label=_('Primary MAC of an interface'),
|
||||
widget=forms.Select(
|
||||
choices=BOOLEAN_WITH_BLANK_CHOICES
|
||||
),
|
||||
)
|
||||
tag = TagFilterField(model)
|
||||
|
||||
|
||||
|
||||
@@ -755,7 +755,10 @@ class ModuleForm(ModuleCommonForm, NetBoxModelForm):
|
||||
queryset=ModuleBay.objects.all(),
|
||||
query_params={
|
||||
'device_id': '$device'
|
||||
}
|
||||
},
|
||||
context={
|
||||
'disabled': 'installed_module',
|
||||
},
|
||||
)
|
||||
module_type = DynamicModelChoiceField(
|
||||
label=_('Module type'),
|
||||
@@ -1092,14 +1095,14 @@ class PowerOutletTemplateForm(ModularComponentTemplateForm):
|
||||
FieldSet('device_type', name=_('Device Type')),
|
||||
FieldSet('module_type', name=_('Module Type')),
|
||||
),
|
||||
'name', 'label', 'type', 'color', 'power_port', 'feed_leg', 'description',
|
||||
'name', 'label', 'type', 'power_port', 'feed_leg', 'description',
|
||||
),
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = PowerOutletTemplate
|
||||
fields = [
|
||||
'device_type', 'module_type', 'name', 'label', 'type', 'color', 'power_port', 'feed_leg', 'description',
|
||||
'device_type', 'module_type', 'name', 'label', 'type', 'power_port', 'feed_leg', 'description',
|
||||
]
|
||||
|
||||
|
||||
|
||||
@@ -18,7 +18,9 @@ from netbox.graphql.filter_mixins import (
|
||||
ImageAttachmentFilterMixin,
|
||||
WeightFilterMixin,
|
||||
)
|
||||
from tenancy.graphql.filter_mixins import TenancyFilterMixin, ContactFilterMixin
|
||||
from tenancy.graphql.filter_mixins import ContactFilterMixin, TenancyFilterMixin
|
||||
from virtualization.models import VMInterface
|
||||
|
||||
from .filter_mixins import (
|
||||
CabledObjectModelFilterMixin,
|
||||
ComponentModelFilterMixin,
|
||||
@@ -419,6 +421,24 @@ class MACAddressFilter(PrimaryModelFilterMixin):
|
||||
)
|
||||
assigned_object_id: ID | None = strawberry_django.filter_field()
|
||||
|
||||
@strawberry_django.filter_field()
|
||||
def assigned(self, value: bool, prefix) -> Q:
|
||||
return Q(**{f'{prefix}assigned_object_id__isnull': (not value)})
|
||||
|
||||
@strawberry_django.filter_field()
|
||||
def primary(self, value: bool, prefix) -> Q:
|
||||
interface_mac_ids = models.Interface.objects.filter(primary_mac_address_id__isnull=False).values_list(
|
||||
'primary_mac_address_id', flat=True
|
||||
)
|
||||
vminterface_mac_ids = VMInterface.objects.filter(primary_mac_address_id__isnull=False).values_list(
|
||||
'primary_mac_address_id', flat=True
|
||||
)
|
||||
query = Q(**{f'{prefix}pk__in': interface_mac_ids}) | Q(**{f'{prefix}pk__in': vminterface_mac_ids})
|
||||
if value:
|
||||
return Q(query)
|
||||
else:
|
||||
return ~Q(query)
|
||||
|
||||
|
||||
@strawberry_django.filter_type(models.Interface, lookups=True)
|
||||
class InterfaceFilter(ModularComponentModelFilterMixin, InterfaceBaseFilterMixin, CabledObjectModelFilterMixin):
|
||||
|
||||
@@ -673,7 +673,6 @@ class PowerOutletType(ModularComponentType, CabledObjectMixin, PathEndpointMixin
|
||||
)
|
||||
class PowerOutletTemplateType(ModularComponentTemplateType):
|
||||
power_port: Annotated["PowerPortTemplateType", strawberry.lazy('dcim.graphql.types')] | None
|
||||
color: str
|
||||
|
||||
|
||||
@strawberry_django.type(
|
||||
|
||||
@@ -1,17 +0,0 @@
|
||||
import utilities.fields
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('dcim', '0215_rackreservation_status'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='poweroutlettemplate',
|
||||
name='color',
|
||||
field=utilities.fields.ColorField(blank=True, max_length=6),
|
||||
),
|
||||
]
|
||||
@@ -1,26 +0,0 @@
|
||||
from django.contrib.postgres.operations import CreateCollation
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('dcim', '0216_poweroutlettemplate_color'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
# Create a case-insensitive collation
|
||||
CreateCollation(
|
||||
'case_insensitive',
|
||||
provider='icu',
|
||||
locale='und-u-ks-level2',
|
||||
deterministic=False,
|
||||
),
|
||||
# Create a case-insensitive collation with natural sorting
|
||||
CreateCollation(
|
||||
'ci_natural_sort',
|
||||
provider='icu',
|
||||
locale='und-u-kn-true-ks-level2',
|
||||
deterministic=False,
|
||||
),
|
||||
]
|
||||
@@ -1,311 +0,0 @@
|
||||
from django.db import migrations, models
|
||||
|
||||
PATTERN_OPS_INDEXES = [
|
||||
'dcim_devicerole_slug_7952643b_like',
|
||||
'dcim_devicetype_slug_448745bd_like',
|
||||
'dcim_inventoryitemrole_name_4c8cfe6d_like',
|
||||
'dcim_inventoryitemrole_slug_3556c227_like',
|
||||
'dcim_location_slug_352c5472_like',
|
||||
'dcim_manufacturer_name_841fcd92_like',
|
||||
'dcim_manufacturer_slug_00430749_like',
|
||||
'dcim_moduletypeprofile_name_1709c36e_like',
|
||||
'dcim_platform_slug_b0908ae4_like',
|
||||
'dcim_rackrole_name_9077cfcc_like',
|
||||
'dcim_rackrole_slug_40bbcd3a_like',
|
||||
'dcim_racktype_slug_6bbb384a_like',
|
||||
'dcim_region_slug_ff078a66_like',
|
||||
'dcim_site_name_8fe66c76_like',
|
||||
'dcim_site_slug_4412c762_like',
|
||||
'dcim_sitegroup_slug_a11d2b04_like',
|
||||
]
|
||||
|
||||
|
||||
def remove_indexes(apps, schema_editor):
|
||||
for idx in PATTERN_OPS_INDEXES:
|
||||
schema_editor.execute(f'DROP INDEX IF EXISTS {idx}')
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
('dcim', '0217_ci_collations'),
|
||||
('extras', '0134_ci_collations'),
|
||||
('ipam', '0083_ci_collations'),
|
||||
('tenancy', '0021_ci_collations'),
|
||||
('virtualization', '0048_populate_mac_addresses'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(
|
||||
code=remove_indexes,
|
||||
reverse_code=migrations.RunPython.noop,
|
||||
),
|
||||
migrations.RemoveConstraint(
|
||||
model_name='device',
|
||||
name='dcim_device_unique_name_site_tenant',
|
||||
),
|
||||
migrations.RemoveConstraint(
|
||||
model_name='device',
|
||||
name='dcim_device_unique_name_site',
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='consoleport',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=64),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='consoleporttemplate',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=64),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='consoleserverport',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=64),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='consoleserverporttemplate',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=64),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='device',
|
||||
name='name',
|
||||
field=models.CharField(blank=True, db_collation='ci_natural_sort', max_length=64, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='devicebay',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=64),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='devicebaytemplate',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=64),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='devicerole',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=100),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='devicerole',
|
||||
name='slug',
|
||||
field=models.SlugField(db_collation='case_insensitive', max_length=100),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='devicetype',
|
||||
name='model',
|
||||
field=models.CharField(db_collation='case_insensitive', max_length=100),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='devicetype',
|
||||
name='slug',
|
||||
field=models.SlugField(db_collation='case_insensitive', max_length=100),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='frontport',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=64),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='frontporttemplate',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=64),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='interface',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=64),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='interfacetemplate',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=64),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='inventoryitem',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=64),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='inventoryitemrole',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=100, unique=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='inventoryitemrole',
|
||||
name='slug',
|
||||
field=models.SlugField(db_collation='case_insensitive', max_length=100, unique=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='inventoryitemtemplate',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=64),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='location',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=100),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='location',
|
||||
name='slug',
|
||||
field=models.SlugField(db_collation='case_insensitive', max_length=100),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='manufacturer',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=100, unique=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='manufacturer',
|
||||
name='slug',
|
||||
field=models.SlugField(db_collation='case_insensitive', max_length=100, unique=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='modulebay',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=64),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='modulebaytemplate',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=64),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='moduletype',
|
||||
name='model',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=100),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='moduletypeprofile',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=100, unique=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='platform',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=100),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='platform',
|
||||
name='slug',
|
||||
field=models.SlugField(db_collation='case_insensitive', max_length=100),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='powerfeed',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=100),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='poweroutlet',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=64),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='poweroutlettemplate',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=64),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='powerpanel',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=100),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='powerport',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=64),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='powerporttemplate',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=64),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='rack',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=100),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='rackrole',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=100, unique=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='rackrole',
|
||||
name='slug',
|
||||
field=models.SlugField(db_collation='case_insensitive', max_length=100, unique=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='racktype',
|
||||
name='model',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=100),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='racktype',
|
||||
name='slug',
|
||||
field=models.SlugField(db_collation='case_insensitive', max_length=100, unique=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='rearport',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=64),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='rearporttemplate',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=64),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='region',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=100),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='region',
|
||||
name='slug',
|
||||
field=models.SlugField(db_collation='case_insensitive', max_length=100),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='site',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=100, unique=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='site',
|
||||
name='slug',
|
||||
field=models.SlugField(db_collation='case_insensitive', max_length=100, unique=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='sitegroup',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=100),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='sitegroup',
|
||||
name='slug',
|
||||
field=models.SlugField(db_collation='case_insensitive', max_length=100),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='virtualdevicecontext',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=64),
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name='device',
|
||||
constraint=models.UniqueConstraint(
|
||||
models.F('name'), models.F('site'), models.F('tenant'), name='dcim_device_unique_name_site_tenant'
|
||||
),
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name='device',
|
||||
constraint=models.UniqueConstraint(
|
||||
models.F('name'),
|
||||
models.F('site'),
|
||||
condition=models.Q(('tenant__isnull', True)),
|
||||
name='dcim_device_unique_name_site',
|
||||
violation_error_message='Device name must be unique per site.',
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -393,6 +393,17 @@ class CableTermination(ChangeLoggedModel):
def clean(self):
super().clean()

# Disallow connecting a cable to any termination object that is
# explicitly flagged as "mark connected".
termination = getattr(self, 'termination', None)
if termination is not None and getattr(termination, "mark_connected", False):
raise ValidationError(
_("Cannot connect a cable to {obj_parent} > {obj} because it is marked as connected.").format(
obj_parent=termination.parent_object,
obj=termination,
)
)

# Check for existing termination
qs = CableTermination.objects.filter(
termination_type=self.termination_type,
@@ -404,14 +415,14 @@ class CableTermination(ChangeLoggedModel):
existing_termination = qs.first()
if existing_termination is not None:
raise ValidationError(
_("Duplicate termination found for {app_label}.{model} {termination_id}: cable {cable_pk}".format(
_("Duplicate termination found for {app_label}.{model} {termination_id}: cable {cable_pk}").format(
app_label=self.termination_type.app_label,
model=self.termination_type.model,
termination_id=self.termination_id,
cable_pk=existing_termination.cable.pk
))
)
)
# Validate interface type (if applicable)
# Validate the interface type (if applicable)
if self.termination_type.model == 'interface' and self.termination.type in NONCONNECTABLE_IFACE_TYPES:
raise ValidationError(
_("Cables cannot be terminated to {type_display} interfaces").format(
@@ -43,10 +43,10 @@ class ComponentTemplateModel(ChangeLoggedModel, TrackingModelMixin):
|
||||
name = models.CharField(
|
||||
verbose_name=_('name'),
|
||||
max_length=64,
|
||||
db_collation='ci_natural_sort',
|
||||
help_text=_(
|
||||
"{module} is accepted as a substitution for the module bay position when attached to a module type."
|
||||
),
|
||||
db_collation="natural_sort"
|
||||
)
|
||||
label = models.CharField(
|
||||
verbose_name=_('label'),
|
||||
@@ -339,10 +339,6 @@ class PowerOutletTemplate(ModularComponentTemplateModel):
|
||||
blank=True,
|
||||
null=True
|
||||
)
|
||||
color = ColorField(
|
||||
verbose_name=_('color'),
|
||||
blank=True
|
||||
)
|
||||
power_port = models.ForeignKey(
|
||||
to='dcim.PowerPortTemplate',
|
||||
on_delete=models.SET_NULL,
|
||||
@@ -393,7 +389,6 @@ class PowerOutletTemplate(ModularComponentTemplateModel):
|
||||
name=self.resolve_name(kwargs.get('module')),
|
||||
label=self.resolve_label(kwargs.get('module')),
|
||||
type=self.type,
|
||||
color=self.color,
|
||||
power_port=power_port,
|
||||
feed_leg=self.feed_leg,
|
||||
**kwargs
|
||||
@@ -404,7 +399,6 @@ class PowerOutletTemplate(ModularComponentTemplateModel):
|
||||
return {
|
||||
'name': self.name,
|
||||
'type': self.type,
|
||||
'color': self.color,
|
||||
'power_port': self.power_port.name if self.power_port else None,
|
||||
'feed_leg': self.feed_leg,
|
||||
'label': self.label,
|
||||
|
||||
@@ -52,7 +52,7 @@ class ComponentModel(NetBoxModel):
|
||||
name = models.CharField(
|
||||
verbose_name=_('name'),
|
||||
max_length=64,
|
||||
db_collation='ci_natural_sort',
|
||||
db_collation="natural_sort"
|
||||
)
|
||||
label = models.CharField(
|
||||
verbose_name=_('label'),
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
import decimal
|
||||
import yaml
|
||||
|
||||
from functools import cached_property
|
||||
|
||||
import yaml
|
||||
from django.contrib.contenttypes.fields import GenericForeignKey, GenericRelation
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.core.exceptions import ValidationError
|
||||
@@ -9,6 +10,7 @@ from django.core.files.storage import default_storage
|
||||
from django.core.validators import MaxValueValidator, MinValueValidator
|
||||
from django.db import models
|
||||
from django.db.models import F, ProtectedError, prefetch_related_objects
|
||||
from django.db.models.functions import Lower
|
||||
from django.db.models.signals import post_save
|
||||
from django.urls import reverse
|
||||
from django.utils.safestring import mark_safe
|
||||
@@ -23,8 +25,8 @@ from extras.querysets import ConfigContextModelQuerySet
|
||||
from netbox.choices import ColorChoices
|
||||
from netbox.config import ConfigItem
|
||||
from netbox.models import NestedGroupModel, OrganizationalModel, PrimaryModel
|
||||
from netbox.models.features import ContactsMixin, ImageAttachmentsMixin
|
||||
from netbox.models.mixins import WeightMixin
|
||||
from netbox.models.features import ContactsMixin, ImageAttachmentsMixin
|
||||
from utilities.fields import ColorField, CounterCacheField
|
||||
from utilities.prefetch import get_prefetchable_fields
|
||||
from utilities.tracking import TrackingModelMixin
|
||||
@@ -32,6 +34,7 @@ from .device_components import *
|
||||
from .mixins import RenderConfigMixin
|
||||
from .modules import Module
|
||||
|
||||
|
||||
__all__ = (
|
||||
'Device',
|
||||
'DeviceRole',
|
||||
@@ -80,13 +83,11 @@ class DeviceType(ImageAttachmentsMixin, PrimaryModel, WeightMixin):
|
||||
)
|
||||
model = models.CharField(
|
||||
verbose_name=_('model'),
|
||||
max_length=100,
|
||||
db_collation='case_insensitive',
|
||||
max_length=100
|
||||
)
|
||||
slug = models.SlugField(
|
||||
verbose_name=_('slug'),
|
||||
max_length=100,
|
||||
db_collation='case_insensitive',
|
||||
max_length=100
|
||||
)
|
||||
default_platform = models.ForeignKey(
|
||||
to='dcim.Platform',
|
||||
@@ -524,7 +525,7 @@ class Device(
|
||||
max_length=64,
|
||||
blank=True,
|
||||
null=True,
|
||||
db_collation='ci_natural_sort',
|
||||
db_collation="natural_sort"
|
||||
)
|
||||
serial = models.CharField(
|
||||
max_length=50,
|
||||
@@ -720,11 +721,11 @@ class Device(
|
||||
ordering = ('name', 'pk') # Name may be null
|
||||
constraints = (
|
||||
models.UniqueConstraint(
|
||||
'name', 'site', 'tenant',
|
||||
Lower('name'), 'site', 'tenant',
|
||||
name='%(app_label)s_%(class)s_unique_name_site_tenant'
|
||||
),
|
||||
models.UniqueConstraint(
|
||||
'name', 'site',
|
||||
Lower('name'), 'site',
|
||||
name='%(app_label)s_%(class)s_unique_name_site',
|
||||
condition=Q(tenant__isnull=True),
|
||||
violation_error_message=_("Device name must be unique per site.")
|
||||
@@ -1118,7 +1119,7 @@ class VirtualChassis(PrimaryModel):
|
||||
name = models.CharField(
|
||||
verbose_name=_('name'),
|
||||
max_length=64,
|
||||
db_collation='natural_sort',
|
||||
db_collation="natural_sort"
|
||||
)
|
||||
domain = models.CharField(
|
||||
verbose_name=_('domain'),
|
||||
@@ -1181,7 +1182,7 @@ class VirtualDeviceContext(PrimaryModel):
|
||||
name = models.CharField(
|
||||
verbose_name=_('name'),
|
||||
max_length=64,
|
||||
db_collation='ci_natural_sort',
|
||||
db_collation="natural_sort"
|
||||
)
|
||||
status = models.CharField(
|
||||
verbose_name=_('status'),
|
||||
|
||||
@@ -31,8 +31,7 @@ class ModuleTypeProfile(PrimaryModel):
|
||||
name = models.CharField(
|
||||
verbose_name=_('name'),
|
||||
max_length=100,
|
||||
unique=True,
|
||||
db_collation='ci_natural_sort',
|
||||
unique=True
|
||||
)
|
||||
schema = models.JSONField(
|
||||
blank=True,
|
||||
@@ -73,8 +72,7 @@ class ModuleType(ImageAttachmentsMixin, PrimaryModel, WeightMixin):
|
||||
)
|
||||
model = models.CharField(
|
||||
verbose_name=_('model'),
|
||||
max_length=100,
|
||||
db_collation='ci_natural_sort',
|
||||
max_length=100
|
||||
)
|
||||
part_number = models.CharField(
|
||||
verbose_name=_('part number'),
|
||||
|
||||
@@ -37,7 +37,7 @@ class PowerPanel(ContactsMixin, ImageAttachmentsMixin, PrimaryModel):
|
||||
name = models.CharField(
|
||||
verbose_name=_('name'),
|
||||
max_length=100,
|
||||
db_collation='ci_natural_sort',
|
||||
db_collation="natural_sort"
|
||||
)
|
||||
|
||||
prerequisite_models = (
|
||||
@@ -88,7 +88,7 @@ class PowerFeed(PrimaryModel, PathEndpoint, CabledObjectModel):
|
||||
name = models.CharField(
|
||||
verbose_name=_('name'),
|
||||
max_length=100,
|
||||
db_collation='ci_natural_sort',
|
||||
db_collation="natural_sort"
|
||||
)
|
||||
status = models.CharField(
|
||||
verbose_name=_('status'),
|
||||
|
||||
@@ -137,14 +137,12 @@ class RackType(RackBase):
|
||||
)
|
||||
model = models.CharField(
|
||||
verbose_name=_('model'),
|
||||
max_length=100,
|
||||
db_collation='ci_natural_sort',
|
||||
max_length=100
|
||||
)
|
||||
slug = models.SlugField(
|
||||
verbose_name=_('slug'),
|
||||
max_length=100,
|
||||
unique=True,
|
||||
db_collation='case_insensitive',
|
||||
unique=True
|
||||
)
|
||||
|
||||
clone_fields = (
|
||||
@@ -264,7 +262,7 @@ class Rack(ContactsMixin, ImageAttachmentsMixin, RackBase):
|
||||
name = models.CharField(
|
||||
verbose_name=_('name'),
|
||||
max_length=100,
|
||||
db_collation='ci_natural_sort',
|
||||
db_collation="natural_sort"
|
||||
)
|
||||
facility_id = models.CharField(
|
||||
max_length=50,
|
||||
|
||||
@@ -142,14 +142,13 @@ class Site(ContactsMixin, ImageAttachmentsMixin, PrimaryModel):
|
||||
verbose_name=_('name'),
|
||||
max_length=100,
|
||||
unique=True,
|
||||
db_collation='ci_natural_sort',
|
||||
help_text=_("Full name of the site")
|
||||
help_text=_("Full name of the site"),
|
||||
db_collation="natural_sort"
|
||||
)
|
||||
slug = models.SlugField(
|
||||
verbose_name=_('slug'),
|
||||
max_length=100,
|
||||
unique=True,
|
||||
db_collation='case_insensitive',
|
||||
unique=True
|
||||
)
|
||||
status = models.CharField(
|
||||
verbose_name=_('status'),
|
||||
|
||||
@@ -1174,6 +1174,9 @@ class MACAddressTable(NetBoxTable):
|
||||
orderable=False,
|
||||
verbose_name=_('Parent')
|
||||
)
|
||||
is_primary = columns.BooleanColumn(
|
||||
verbose_name=_('Primary')
|
||||
)
|
||||
tags = columns.TagColumn(
|
||||
url_name='dcim:macaddress_list'
|
||||
)
|
||||
@@ -1184,7 +1187,7 @@ class MACAddressTable(NetBoxTable):
|
||||
class Meta(DeviceComponentTable.Meta):
|
||||
model = models.MACAddress
|
||||
fields = (
|
||||
'pk', 'id', 'mac_address', 'assigned_object_parent', 'assigned_object', 'description', 'comments', 'tags',
|
||||
'created', 'last_updated',
|
||||
'pk', 'id', 'mac_address', 'assigned_object_parent', 'assigned_object', 'description', 'is_primary',
|
||||
'comments', 'tags', 'created', 'last_updated',
|
||||
)
|
||||
default_columns = ('pk', 'mac_address', 'assigned_object_parent', 'assigned_object', 'description')
|
||||
|
||||
@@ -211,9 +211,6 @@ class PowerPortTemplateTable(ComponentTemplateTable):
|
||||
|
||||
|
||||
class PowerOutletTemplateTable(ComponentTemplateTable):
|
||||
color = columns.ColorColumn(
|
||||
verbose_name=_('Color'),
|
||||
)
|
||||
actions = columns.ActionsColumn(
|
||||
actions=('edit', 'delete'),
|
||||
extra_buttons=MODULAR_COMPONENT_TEMPLATE_BUTTONS
|
||||
@@ -221,7 +218,7 @@ class PowerOutletTemplateTable(ComponentTemplateTable):
|
||||
|
||||
class Meta(ComponentTemplateTable.Meta):
|
||||
model = models.PowerOutletTemplate
|
||||
fields = ('pk', 'name', 'label', 'type', 'color', 'power_port', 'feed_leg', 'description', 'actions')
|
||||
fields = ('pk', 'name', 'label', 'type', 'power_port', 'feed_leg', 'description', 'actions')
|
||||
empty_text = "None"
|
||||
|
||||
|
||||
|
||||
@@ -13,8 +13,7 @@ from ipam.choices import VLANQinQRoleChoices
from ipam.models import ASN, RIR, VLAN, VRF
from netbox.api.serializers import GenericObjectSerializer
from tenancy.models import Tenant
from users.constants import TOKEN_PREFIX
from users.models import Token, User
from users.models import User
from utilities.testing import APITestCase, APIViewTestCases, create_test_device, disable_logging
from virtualization.models import Cluster, ClusterType
from wireless.choices import WirelessChannelChoices
@@ -1307,6 +1306,7 @@ class DeviceTest(APIViewTestCases.APIViewTestCase):
}
user_permissions = (
'dcim.view_site', 'dcim.view_rack', 'dcim.view_location', 'dcim.view_devicerole', 'dcim.view_devicetype',
'extras.view_configtemplate',
)

@classmethod
@@ -1486,58 +1486,12 @@ class DeviceTest(APIViewTestCases.APIViewTestCase):
|
||||
device.config_template = configtemplate
|
||||
device.save()
|
||||
|
||||
self.add_permissions('dcim.render_config_device', 'dcim.view_device')
|
||||
url = reverse('dcim-api:device-render-config', kwargs={'pk': device.pk})
|
||||
self.add_permissions('dcim.add_device')
|
||||
url = reverse('dcim-api:device-detail', kwargs={'pk': device.pk}) + 'render-config/'
|
||||
response = self.client.post(url, {}, format='json', **self.header)
|
||||
self.assertHttpStatus(response, status.HTTP_200_OK)
|
||||
self.assertEqual(response.data['content'], f'Config for device {device.name}')
|
||||
|
||||
def test_render_config_without_permission(self):
|
||||
configtemplate = ConfigTemplate.objects.create(
|
||||
name='Config Template 1',
|
||||
template_code='Config for device {{ device.name }}'
|
||||
)
|
||||
|
||||
device = Device.objects.first()
|
||||
device.config_template = configtemplate
|
||||
device.save()
|
||||
|
||||
# No permissions added - user has no render_config permission
|
||||
url = reverse('dcim-api:device-render-config', kwargs={'pk': device.pk})
|
||||
response = self.client.post(url, {}, format='json', **self.header)
|
||||
self.assertHttpStatus(response, status.HTTP_404_NOT_FOUND)
|
||||
|
||||
def test_render_config_token_write_enabled(self):
|
||||
configtemplate = ConfigTemplate.objects.create(
|
||||
name='Config Template 1',
|
||||
template_code='Config for device {{ device.name }}'
|
||||
)
|
||||
|
||||
device = Device.objects.first()
|
||||
device.config_template = configtemplate
|
||||
device.save()
|
||||
|
||||
self.add_permissions('dcim.render_config_device', 'dcim.view_device')
|
||||
url = reverse('dcim-api:device-render-config', kwargs={'pk': device.pk})
|
||||
|
||||
# Request without token auth should fail with PermissionDenied
|
||||
response = self.client.post(url, {}, format='json')
|
||||
self.assertHttpStatus(response, status.HTTP_403_FORBIDDEN)
|
||||
|
||||
# Create token with write_enabled=False
|
||||
token = Token.objects.create(version=2, user=self.user, write_enabled=False)
|
||||
token_header = f'Bearer {TOKEN_PREFIX}{token.key}.{token.token}'
|
||||
|
||||
# Request with write-disabled token should fail
|
||||
response = self.client.post(url, {}, format='json', HTTP_AUTHORIZATION=token_header)
|
||||
self.assertHttpStatus(response, status.HTTP_403_FORBIDDEN)
|
||||
|
||||
# Enable write and retry
|
||||
token.write_enabled = True
|
||||
token.save()
|
||||
response = self.client.post(url, {}, format='json', HTTP_AUTHORIZATION=token_header)
|
||||
self.assertHttpStatus(response, status.HTTP_200_OK)
|
||||
|
||||
|
||||
class ModuleTest(APIViewTestCases.APIViewTestCase):
|
||||
model = Module
|
||||
@@ -2422,33 +2376,6 @@ class CableTest(APIViewTestCases.APIViewTestCase):
|
||||
]
|
||||
|
||||
|
||||
class CableTerminationTest(
|
||||
APIViewTestCases.GetObjectViewTestCase,
|
||||
APIViewTestCases.ListObjectsViewTestCase,
|
||||
):
|
||||
model = CableTermination
|
||||
brief_fields = ['cable', 'cable_end', 'display', 'id', 'termination_id', 'termination_type', 'url']
|
||||
|
||||
@classmethod
|
||||
def setUpTestData(cls):
|
||||
device1 = create_test_device('Device 1')
|
||||
device2 = create_test_device('Device 2')
|
||||
|
||||
interfaces = []
|
||||
for device in (device1, device2):
|
||||
for i in range(0, 10):
|
||||
interfaces.append(Interface(device=device, type=InterfaceTypeChoices.TYPE_1GE_FIXED, name=f'eth{i}'))
|
||||
Interface.objects.bulk_create(interfaces)
|
||||
|
||||
cables = (
|
||||
Cable(a_terminations=[interfaces[0]], b_terminations=[interfaces[10]], label='Cable 1'),
|
||||
Cable(a_terminations=[interfaces[1]], b_terminations=[interfaces[11]], label='Cable 2'),
|
||||
Cable(a_terminations=[interfaces[2]], b_terminations=[interfaces[12]], label='Cable 3'),
|
||||
)
|
||||
for cable in cables:
|
||||
cable.save()
|
||||
|
||||
|
||||
class ConnectedDeviceTest(APITestCase):
|
||||
|
||||
@classmethod
|
||||
|
||||
@@ -10,7 +10,7 @@ from netbox.choices import ColorChoices, WeightUnitChoices
from tenancy.models import Tenant, TenantGroup
from users.models import User
from utilities.testing import ChangeLoggedFilterSetTests, create_test_device, create_test_virtualmachine
from virtualization.models import Cluster, ClusterType, ClusterGroup, VMInterface, VirtualMachine
from virtualization.models import Cluster, ClusterGroup, ClusterType, VirtualMachine, VMInterface
from wireless.choices import WirelessChannelChoices, WirelessRoleChoices
from wireless.models import WirelessLink
@@ -1919,21 +1919,18 @@ class PowerOutletTemplateTestCase(TestCase, DeviceComponentTemplateFilterSetTest
|
||||
device_type=device_types[0],
|
||||
name='Power Outlet 1',
|
||||
feed_leg=PowerOutletFeedLegChoices.FEED_LEG_A,
|
||||
color=ColorChoices.COLOR_RED,
|
||||
description='foobar1'
|
||||
),
|
||||
PowerOutletTemplate(
|
||||
device_type=device_types[1],
|
||||
name='Power Outlet 2',
|
||||
feed_leg=PowerOutletFeedLegChoices.FEED_LEG_B,
|
||||
color=ColorChoices.COLOR_GREEN,
|
||||
description='foobar2'
|
||||
),
|
||||
PowerOutletTemplate(
|
||||
device_type=device_types[2],
|
||||
name='Power Outlet 3',
|
||||
feed_leg=PowerOutletFeedLegChoices.FEED_LEG_C,
|
||||
color=ColorChoices.COLOR_BLUE,
|
||||
description='foobar3'
|
||||
),
|
||||
))
|
||||
@@ -1946,10 +1943,6 @@ class PowerOutletTemplateTestCase(TestCase, DeviceComponentTemplateFilterSetTest
|
||||
params = {'feed_leg': [PowerOutletFeedLegChoices.FEED_LEG_A, PowerOutletFeedLegChoices.FEED_LEG_B]}
|
||||
self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2)
|
||||
|
||||
def test_color(self):
|
||||
params = {'color': [ColorChoices.COLOR_RED, ColorChoices.COLOR_GREEN]}
|
||||
self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2)
|
||||
|
||||
|
||||
class InterfaceTemplateTestCase(TestCase, DeviceComponentTemplateFilterSetTests, ChangeLoggedFilterSetTests):
|
||||
queryset = InterfaceTemplate.objects.all()
|
||||
@@ -7171,9 +7164,20 @@ class MACAddressTestCase(TestCase, ChangeLoggedFilterSetTests):
|
||||
MACAddress(mac_address='00-00-00-05-01-01', assigned_object=vm_interfaces[1]),
|
||||
MACAddress(mac_address='00-00-00-06-01-01', assigned_object=vm_interfaces[2]),
|
||||
MACAddress(mac_address='00-00-00-06-01-02', assigned_object=vm_interfaces[2]),
|
||||
# unassigned
|
||||
MACAddress(mac_address='00-00-00-07-01-01'),
|
||||
)
|
||||
MACAddress.objects.bulk_create(mac_addresses)
|
||||
|
||||
# Set MAC addresses as primary
|
||||
for idx, interface in enumerate(interfaces):
|
||||
interface.primary_mac_address = mac_addresses[idx]
|
||||
interface.save()
|
||||
for idx, vm_interface in enumerate(vm_interfaces):
|
||||
# Offset by 4 for device MACs
|
||||
vm_interface.primary_mac_address = mac_addresses[idx + 4]
|
||||
vm_interface.save()
|
||||
|
||||
def test_mac_address(self):
|
||||
params = {'mac_address': ['00-00-00-01-01-01', '00-00-00-02-01-01']}
|
||||
self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2)
|
||||
@@ -7205,3 +7209,15 @@ class MACAddressTestCase(TestCase, ChangeLoggedFilterSetTests):
|
||||
self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2)
|
||||
params = {'vminterface': [vm_interfaces[0].name, vm_interfaces[1].name]}
|
||||
self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2)
|
||||
|
||||
def test_assigned(self):
|
||||
params = {'assigned': True}
|
||||
self.assertEqual(self.filterset(params, self.queryset).qs.count(), 8)
|
||||
params = {'assigned': False}
|
||||
self.assertEqual(self.filterset(params, self.queryset).qs.count(), 1)
|
||||
|
||||
def test_primary(self):
|
||||
params = {'primary': True}
|
||||
self.assertEqual(self.filterset(params, self.queryset).qs.count(), 6)
|
||||
params = {'primary': False}
|
||||
self.assertEqual(self.filterset(params, self.queryset).qs.count(), 3)
|
||||
|
||||
@@ -967,6 +967,18 @@ class CableTestCase(TestCase):
with self.assertRaises(ValidationError):
cable.clean()

def test_cannot_cable_to_mark_connected(self):
"""
Test that a cable cannot be connected to an interface marked as connected.
"""
device1 = Device.objects.get(name='TestDevice1')
interface1 = Interface.objects.get(device__name='TestDevice2', name='eth1')

mark_connected_interface = Interface(device=device1, name='mark_connected1', mark_connected=True)
cable = Cable(a_terminations=[mark_connected_interface], b_terminations=[interface1])
with self.assertRaises(ValidationError):
cable.clean()


class VirtualDeviceContextTestCase(TestCase):
@@ -2885,6 +2885,43 @@ class InterfaceTestCase(ViewTestCases.DeviceComponentViewTestCase):
self.client.post(self._get_url('bulk_delete'), data)
self.assertEqual(device.interfaces.count(), 4) # Child & parent were both deleted

def test_rename_select_all_spans_pages(self):
"""
Tests the bulk rename functionality for interfaces spanning multiple pages in the UI.
"""
device_name = 'DeviceRename'
device = create_test_device(device_name)
# Create > default page size (25) so selection spans multiple pages
for i in range(37):
Interface.objects.create(device=device, name=f'eth{i}')

self.add_permissions('dcim.change_interface')

# Filter to this device's interfaces to simulate a real list filter
get_qs = {'device_id': Device.objects.get(name=device_name).pk}
post_url = f'{self._get_url("bulk_rename")}?device_id={get_qs["device_id"]}'

# Preview step: ensure 37 selected (not just one page)
data = {'_preview': '1', '_all': '1', 'find': 'eth', 'replace': 'xe'}
response = self.client.post(post_url, data=data)
self.assertHttpStatus(response, 200)
self.assertEqual(len(response.context['selected_objects']), 37)

# Extract pk[] just like the browser would submit on Apply
# (either from the form's initial, or from selected_objects)
pk_list = response.context['form'].initial.get('pk')
if not pk_list:
pk_list = [obj.pk for obj in response.context['selected_objects']]
pk_list = [str(pk) for pk in pk_list]

# Apply step: include pk[] in the POST
apply_data = {'_apply': '1', '_all': '1', 'find': 'eth', 'replace': 'xe', 'pk': pk_list}
response = self.client.post(post_url, data=apply_data)

# On success the view redirects back to the return URL
self.assertHttpStatus(response, 302)
self.assertEqual(Interface.objects.filter(device=device, name__startswith='xe').count(), 37)
class FrontPortTestCase(ViewTestCases.DeviceComponentViewTestCase):
|
||||
model = FrontPort
|
||||
|
||||
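Editor's note: as a rough illustration of the find/replace the test above drives (assuming a plain substring replacement, which is what the asserted results imply; the names below are examples only, not part of NetBox):

```python
# Illustration only: 37 interface names, "eth" replaced with "xe".
names = [f'eth{i}' for i in range(37)]
renamed = [name.replace('eth', 'xe') for name in names]
assert renamed[0] == 'xe0' and renamed[-1] == 'xe36'
assert len(renamed) == 37  # more than one 25-row page of selections
```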
@@ -295,6 +295,7 @@ class RegionBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(Region, 'bulk_rename', path='rename', detail=False)
|
||||
class RegionBulkRenameView(generic.BulkRenameView):
|
||||
queryset = Region.objects.all()
|
||||
filterset = filtersets.RegionFilterSet
|
||||
|
||||
|
||||
@register_model_view(Region, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -426,6 +427,7 @@ class SiteGroupBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(SiteGroup, 'bulk_rename', path='rename', detail=False)
|
||||
class SiteGroupBulkRenameView(generic.BulkRenameView):
|
||||
queryset = SiteGroup.objects.all()
|
||||
filterset = filtersets.SiteGroupFilterSet
|
||||
|
||||
|
||||
@register_model_view(SiteGroup, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -516,6 +518,7 @@ class SiteBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(Site, 'bulk_rename', path='rename', detail=False)
|
||||
class SiteBulkRenameView(generic.BulkRenameView):
|
||||
queryset = Site.objects.all()
|
||||
filterset = filtersets.SiteFilterSet
|
||||
|
||||
|
||||
@register_model_view(Site, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -625,6 +628,7 @@ class LocationBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(Location, 'bulk_rename', path='rename', detail=False)
|
||||
class LocationBulkRenameView(generic.BulkRenameView):
|
||||
queryset = Location.objects.all()
|
||||
filterset = filtersets.LocationFilterSet
|
||||
|
||||
|
||||
@register_model_view(Location, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -695,6 +699,7 @@ class RackRoleBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(RackRole, 'bulk_rename', path='rename', detail=False)
|
||||
class RackRoleBulkRenameView(generic.BulkRenameView):
|
||||
queryset = RackRole.objects.all()
|
||||
filterset = filtersets.RackRoleFilterSet
|
||||
|
||||
|
||||
@register_model_view(RackRole, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -760,6 +765,7 @@ class RackTypeBulkEditView(generic.BulkEditView):
|
||||
class RackTypeBulkRenameView(generic.BulkRenameView):
|
||||
queryset = RackType.objects.all()
|
||||
field_name = 'model'
|
||||
filterset = filtersets.RackTypeFilterSet
|
||||
|
||||
|
||||
@register_model_view(RackType, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -944,6 +950,7 @@ class RackBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(Rack, 'bulk_rename', path='rename', detail=False)
|
||||
class RackBulkRenameView(generic.BulkRenameView):
|
||||
queryset = Rack.objects.all()
|
||||
filterset = filtersets.RackFilterSet
|
||||
|
||||
|
||||
@register_model_view(Rack, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -1083,6 +1090,7 @@ class ManufacturerBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(Manufacturer, 'bulk_rename', path='rename', detail=False)
|
||||
class ManufacturerBulkRenameView(generic.BulkRenameView):
|
||||
queryset = Manufacturer.objects.all()
|
||||
filterset = filtersets.ManufacturerFilterSet
|
||||
|
||||
|
||||
@register_model_view(Manufacturer, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -1336,6 +1344,7 @@ class DeviceTypeBulkEditView(generic.BulkEditView):
|
||||
class DeviceTypeBulkRenameView(generic.BulkRenameView):
|
||||
queryset = DeviceType.objects.all()
|
||||
field_name = 'model'
|
||||
filterset = filtersets.DeviceTypeFilterSet
|
||||
|
||||
|
||||
@register_model_view(DeviceType, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -1397,6 +1406,7 @@ class ModuleTypeProfileBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(ModuleTypeProfile, 'bulk_rename', path='rename', detail=False)
|
||||
class ModuleTypeProfileBulkRenameView(generic.BulkRenameView):
|
||||
queryset = ModuleTypeProfile.objects.all()
|
||||
filterset = filtersets.ModuleTypeProfileFilterSet
|
||||
|
||||
|
||||
@register_model_view(ModuleTypeProfile, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -1612,6 +1622,7 @@ class ModuleTypeBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(ModuleType, 'bulk_rename', path='rename', detail=False)
|
||||
class ModuleTypeBulkRenameView(generic.BulkRenameView):
|
||||
queryset = ModuleType.objects.all()
|
||||
filterset = filtersets.ModuleTypeFilterSet
|
||||
|
||||
|
||||
@register_model_view(ModuleType, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -2100,6 +2111,7 @@ class DeviceRoleBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(DeviceRole, 'bulk_rename', path='rename', detail=False)
|
||||
class DeviceRoleBulkRenameView(generic.BulkRenameView):
|
||||
queryset = DeviceRole.objects.all()
|
||||
filterset = filtersets.DeviceRoleFilterSet
|
||||
|
||||
|
||||
@register_model_view(DeviceRole, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -2175,6 +2187,7 @@ class PlatformBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(Platform, 'bulk_rename', path='rename', detail=False)
|
||||
class PlatformBulkRenameView(generic.BulkRenameView):
|
||||
queryset = Platform.objects.all()
|
||||
filterset = filtersets.PlatformFilterSet
|
||||
|
||||
|
||||
@register_model_view(Platform, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -2582,6 +2595,7 @@ class ConsolePortBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(ConsolePort, 'bulk_rename', path='rename', detail=False)
|
||||
class ConsolePortBulkRenameView(generic.BulkRenameView):
|
||||
queryset = ConsolePort.objects.all()
|
||||
filterset = filtersets.ConsolePortFilterSet
|
||||
|
||||
|
||||
@register_model_view(ConsolePort, 'bulk_disconnect', path='disconnect', detail=False)
|
||||
@@ -2652,6 +2666,7 @@ class ConsoleServerPortBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(ConsoleServerPort, 'bulk_rename', path='rename', detail=False)
|
||||
class ConsoleServerPortBulkRenameView(generic.BulkRenameView):
|
||||
queryset = ConsoleServerPort.objects.all()
|
||||
filterset = filtersets.ConsoleServerPortFilterSet
|
||||
|
||||
|
||||
@register_model_view(ConsoleServerPort, 'bulk_disconnect', path='disconnect', detail=False)
|
||||
@@ -2722,6 +2737,7 @@ class PowerPortBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(PowerPort, 'bulk_rename', path='rename', detail=False)
|
||||
class PowerPortBulkRenameView(generic.BulkRenameView):
|
||||
queryset = PowerPort.objects.all()
|
||||
filterset = filtersets.PowerPortFilterSet
|
||||
|
||||
|
||||
@register_model_view(PowerPort, 'bulk_disconnect', path='disconnect', detail=False)
|
||||
@@ -2792,6 +2808,7 @@ class PowerOutletBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(PowerOutlet, 'bulk_rename', path='rename', detail=False)
|
||||
class PowerOutletBulkRenameView(generic.BulkRenameView):
|
||||
queryset = PowerOutlet.objects.all()
|
||||
filterset = filtersets.PowerOutletFilterSet
|
||||
|
||||
|
||||
@register_model_view(PowerOutlet, 'bulk_disconnect', path='disconnect', detail=False)
|
||||
@@ -2934,6 +2951,7 @@ class InterfaceBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(Interface, 'bulk_rename', path='rename', detail=False)
|
||||
class InterfaceBulkRenameView(generic.BulkRenameView):
|
||||
queryset = Interface.objects.all()
|
||||
filterset = filtersets.InterfaceFilterSet
|
||||
|
||||
|
||||
@register_model_view(Interface, 'bulk_disconnect', path='disconnect', detail=False)
|
||||
@@ -3005,6 +3023,7 @@ class FrontPortBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(FrontPort, 'bulk_rename', path='rename', detail=False)
|
||||
class FrontPortBulkRenameView(generic.BulkRenameView):
|
||||
queryset = FrontPort.objects.all()
|
||||
filterset = filtersets.FrontPortFilterSet
|
||||
|
||||
|
||||
@register_model_view(FrontPort, 'bulk_disconnect', path='disconnect', detail=False)
|
||||
@@ -3080,6 +3099,7 @@ class RearPortBulkRenameView(generic.BulkRenameView):
|
||||
@register_model_view(RearPort, 'bulk_disconnect', path='disconnect', detail=False)
|
||||
class RearPortBulkDisconnectView(BulkDisconnectView):
|
||||
queryset = RearPort.objects.all()
|
||||
filterset = filtersets.RearPortFilterSet
|
||||
|
||||
|
||||
@register_model_view(RearPort, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -3145,6 +3165,7 @@ class ModuleBayBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(ModuleBay, 'bulk_rename', path='rename', detail=False)
|
||||
class ModuleBayBulkRenameView(generic.BulkRenameView):
|
||||
queryset = ModuleBay.objects.all()
|
||||
filterset = filtersets.ModuleBayFilterSet
|
||||
|
||||
|
||||
@register_model_view(ModuleBay, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -3287,6 +3308,7 @@ class DeviceBayBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(DeviceBay, 'bulk_rename', path='rename', detail=False)
|
||||
class DeviceBayBulkRenameView(generic.BulkRenameView):
|
||||
queryset = DeviceBay.objects.all()
|
||||
filterset = filtersets.DeviceBayFilterSet
|
||||
|
||||
|
||||
@register_model_view(DeviceBay, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -3348,6 +3370,7 @@ class InventoryItemBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(InventoryItem, 'bulk_rename', path='rename', detail=False)
|
||||
class InventoryItemBulkRenameView(generic.BulkRenameView):
|
||||
queryset = InventoryItem.objects.all()
|
||||
filterset = filtersets.InventoryItemFilterSet
|
||||
|
||||
|
||||
@register_model_view(InventoryItem, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -3431,6 +3454,7 @@ class InventoryItemRoleBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(InventoryItemRole, 'bulk_rename', path='rename', detail=False)
|
||||
class InventoryItemRoleBulkRenameView(generic.BulkRenameView):
|
||||
queryset = InventoryItemRole.objects.all()
|
||||
filterset = filtersets.InventoryItemRoleFilterSet
|
||||
|
||||
|
||||
@register_model_view(InventoryItemRole, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -3634,6 +3658,7 @@ class CableBulkEditView(generic.BulkEditView):
|
||||
class CableBulkRenameView(generic.BulkRenameView):
|
||||
queryset = Cable.objects.all()
|
||||
field_name = 'label'
|
||||
filterset = filtersets.CableFilterSet
|
||||
|
||||
|
||||
@register_model_view(Cable, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -3931,6 +3956,7 @@ class VirtualChassisBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(VirtualChassis, 'bulk_rename', path='rename', detail=False)
|
||||
class VirtualChassisBulkRenameView(generic.BulkRenameView):
|
||||
queryset = VirtualChassis.objects.all()
|
||||
filterset = filtersets.VirtualChassisFilterSet
|
||||
|
||||
|
||||
@register_model_view(VirtualChassis, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -3993,6 +4019,7 @@ class PowerPanelBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(PowerPanel, 'bulk_rename', path='rename', detail=False)
|
||||
class PowerPanelBulkRenameView(generic.BulkRenameView):
|
||||
queryset = PowerPanel.objects.all()
|
||||
filterset = filtersets.PowerPanelFilterSet
|
||||
|
||||
|
||||
@register_model_view(PowerPanel, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -4050,6 +4077,7 @@ class PowerFeedBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(PowerFeed, 'bulk_rename', path='rename', detail=False)
|
||||
class PowerFeedBulkRenameView(generic.BulkRenameView):
|
||||
queryset = PowerFeed.objects.all()
|
||||
filterset = filtersets.PowerFeedFilterSet
|
||||
|
||||
|
||||
@register_model_view(PowerFeed, 'bulk_disconnect', path='disconnect', detail=False)
|
||||
@@ -4128,6 +4156,7 @@ class VirtualDeviceContextBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(VirtualDeviceContext, 'bulk_rename', path='rename', detail=False)
|
||||
class VirtualDeviceContextBulkRenameView(generic.BulkRenameView):
|
||||
queryset = VirtualDeviceContext.objects.all()
|
||||
filterset = filtersets.VirtualDeviceContextFilterSet
|
||||
|
||||
|
||||
@register_model_view(VirtualDeviceContext, 'bulk_delete', path='delete', detail=False)
|
||||
|
||||
@@ -4,7 +4,6 @@ from rest_framework.renderers import JSONRenderer
from rest_framework.response import Response
from rest_framework.status import HTTP_400_BAD_REQUEST

from netbox.api.authentication import TokenWritePermission
from netbox.api.renderers import TextRenderer
from .serializers import ConfigTemplateSerializer

@@ -65,24 +64,12 @@ class RenderConfigMixin(ConfigTemplateRenderMixin):
"""
Provides a /render-config/ endpoint for REST API views whose model may have a ConfigTemplate assigned.
"""

def get_permissions(self):
# For render_config action, check only token write ability (not model permissions)
if self.action == 'render_config':
return [TokenWritePermission()]
return super().get_permissions()

@action(detail=True, methods=['post'], url_path='render-config', renderer_classes=[JSONRenderer, TextRenderer])
def render_config(self, request, pk):
"""
Resolve and render the preferred ConfigTemplate for this Device.
"""
# Override restrict() on the default queryset to enforce the render_config & view actions
self.queryset = self.queryset.model.objects.restrict(request.user, 'render_config').restrict(
request.user, 'view'
)
instance = self.get_object()

object_type = instance._meta.model_name
configtemplate = instance.get_config_template()
if not configtemplate:

@@ -5,6 +5,7 @@ from rest_framework import serializers
from core.api.serializers_.jobs import JobSerializer
from extras.models import Script
from netbox.api.serializers import ValidatedModelSerializer
from utilities.datetime import local_now

__all__ = (
'ScriptDetailSerializer',
@@ -66,11 +67,31 @@ class ScriptInputSerializer(serializers.Serializer):
interval = serializers.IntegerField(required=False, allow_null=True)

def validate_schedule_at(self, value):
if value and not self.context['script'].python_class.scheduling_enabled:
raise serializers.ValidationError(_("Scheduling is not enabled for this script."))
"""
Validates the specified schedule time for a script execution.
"""
if value:
if not self.context['script'].python_class.scheduling_enabled:
raise serializers.ValidationError(_('Scheduling is not enabled for this script.'))
if value < local_now():
raise serializers.ValidationError(_('Scheduled time must be in the future.'))
return value

def validate_interval(self, value):
"""
Validates the provided interval based on the script's scheduling configuration.
"""
if value and not self.context['script'].python_class.scheduling_enabled:
raise serializers.ValidationError(_("Scheduling is not enabled for this script."))
raise serializers.ValidationError(_('Scheduling is not enabled for this script.'))
return value

def validate(self, data):
"""
Validates the given data and ensures the necessary fields are populated.
"""
# Set the schedule_at time to now if only an interval is provided
# while handling the case where schedule_at is null.
if data.get('interval') and not data.get('schedule_at'):
data['schedule_at'] = local_now()

return super().validate(data)

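Editor's note: a minimal sketch of the scheduling rules the serializer above enforces (the standalone function and its names are illustrative only, not part of NetBox):

```python
def check_schedule(schedule_at, interval, scheduling_enabled, now):
    """Illustrative only: mirrors the validation rules in ScriptInputSerializer."""
    if (schedule_at or interval) and not scheduling_enabled:
        raise ValueError('Scheduling is not enabled for this script.')
    if schedule_at and schedule_at < now:
        raise ValueError('Scheduled time must be in the future.')
    if interval and not schedule_at:
        schedule_at = now  # an interval-only request starts immediately
    return schedule_at, interval
```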
@@ -1,8 +1,10 @@
from django.urls import include, path

from core.api.views import ObjectTypeViewSet
from netbox.api.routers import NetBoxRouter
from . import views


router = NetBoxRouter()
router.APIRootView = views.ExtrasRootView

@@ -27,6 +29,9 @@ router.register('config-context-profiles', views.ConfigContextProfileViewSet)
router.register('config-templates', views.ConfigTemplateViewSet)
router.register('scripts', views.ScriptViewSet, basename='script')

# TODO: Remove in NetBox v4.5
router.register('object-types', ObjectTypeViewSet)

app_name = 'extras-api'
urlpatterns = [
path('dashboard/', views.DashboardView.as_view(), name='dashboard'),

@@ -16,7 +16,7 @@ from rq import Worker
from extras import filtersets
from extras.jobs import ScriptJob
from extras.models import *
from netbox.api.authentication import IsAuthenticatedOrLoginNotRequired, TokenWritePermission
from netbox.api.authentication import IsAuthenticatedOrLoginNotRequired
from netbox.api.features import SyncedDataMixin
from netbox.api.metadata import ContentTypeMetadata
from netbox.api.renderers import TextRenderer
@@ -238,22 +238,13 @@ class ConfigTemplateViewSet(SyncedDataMixin, ConfigTemplateRenderMixin, NetBoxMo
serializer_class = serializers.ConfigTemplateSerializer
filterset_class = filtersets.ConfigTemplateFilterSet

def get_permissions(self):
# For render action, check only token write ability (not model permissions)
if self.action == 'render':
return [TokenWritePermission()]
return super().get_permissions()

@action(detail=True, methods=['post'], renderer_classes=[JSONRenderer, TextRenderer])
def render(self, request, pk):
"""
Render a ConfigTemplate using the context data provided (if any). If the client requests "text/plain" data,
return the raw rendered content, rather than serialized JSON.
"""
# Override restrict() on the default queryset to enforce the render & view actions
self.queryset = self.queryset.model.objects.restrict(request.user, 'render').restrict(request.user, 'view')
configtemplate = self.get_object()

context = request.data

return self.render_configtemplate(request, configtemplate, context)

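Editor's note: a hedged usage sketch for the render action registered above. The URL path follows from the 'config-templates' router registration; the host, pk, token value, and header scheme are placeholders to adapt to your deployment (a write-enabled token is needed, since TokenWritePermission gates this action).

```python
import requests

# Placeholders: host, pk and token are examples only.
url = 'https://netbox.example.com/api/extras/config-templates/123/render/'
headers = {
    'Authorization': 'Token 0123456789abcdef0123456789abcdef01234567',
    'Accept': 'text/plain',  # request the raw rendered template instead of JSON
}
response = requests.post(url, json={'foo': 'bar'}, headers=headers, timeout=10)
print(response.text)
```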
@@ -1,114 +0,0 @@
|
||||
import django.core.validators
|
||||
import re
|
||||
from django.db import migrations, models
|
||||
|
||||
PATTERN_OPS_INDEXES = [
|
||||
'extras_configcontext_name_4bbfe25d_like',
|
||||
'extras_configcontextprofile_name_070de83b_like',
|
||||
'extras_customfield_name_2fe72707_like',
|
||||
'extras_customfieldchoiceset_name_963e63ea_like',
|
||||
'extras_customlink_name_daed2d18_like',
|
||||
'extras_eventrule_name_899453c6_like',
|
||||
'extras_notificationgroup_name_70b0a3f9_like',
|
||||
'extras_savedfilter_name_8a4bbd09_like',
|
||||
'extras_savedfilter_slug_4f93a959_like',
|
||||
'extras_tag_name_9550b3d9_like',
|
||||
'extras_tag_slug_aaa5b7e9_like',
|
||||
'extras_webhook_name_82cf60b5_like',
|
||||
]
|
||||
|
||||
|
||||
def remove_indexes(apps, schema_editor):
|
||||
for idx in PATTERN_OPS_INDEXES:
|
||||
schema_editor.execute(f'DROP INDEX IF EXISTS {idx}')
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
('extras', '0133_make_cf_minmax_decimal'),
|
||||
('dcim', '0217_ci_collations'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(
|
||||
code=remove_indexes,
|
||||
reverse_code=migrations.RunPython.noop,
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='configcontext',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=100, unique=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='configcontextprofile',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=100, unique=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='customfield',
|
||||
name='name',
|
||||
field=models.CharField(
|
||||
db_collation='ci_natural_sort',
|
||||
max_length=50,
|
||||
unique=True,
|
||||
validators=[
|
||||
django.core.validators.RegexValidator(
|
||||
flags=re.RegexFlag['IGNORECASE'],
|
||||
message='Only alphanumeric characters and underscores are allowed.',
|
||||
regex='^[a-z0-9_]+$',
|
||||
),
|
||||
django.core.validators.RegexValidator(
|
||||
flags=re.RegexFlag['IGNORECASE'],
|
||||
inverse_match=True,
|
||||
message='Double underscores are not permitted in custom field names.',
|
||||
regex='__',
|
||||
),
|
||||
],
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='customfieldchoiceset',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=100, unique=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='customlink',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=100, unique=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='eventrule',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=150, unique=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='notificationgroup',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=100, unique=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='savedfilter',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=100, unique=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='savedfilter',
|
||||
name='slug',
|
||||
field=models.SlugField(db_collation='case_insensitive', max_length=100, unique=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='tag',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=100, unique=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='tag',
|
||||
name='slug',
|
||||
field=models.SlugField(allow_unicode=True, db_collation='case_insensitive', max_length=100, unique=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='webhook',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=150, unique=True),
|
||||
),
|
||||
]
|
||||
@@ -35,8 +35,7 @@ class ConfigContextProfile(SyncedDataMixin, PrimaryModel):
|
||||
name = models.CharField(
|
||||
verbose_name=_('name'),
|
||||
max_length=100,
|
||||
unique=True,
|
||||
db_collation='ci_natural_sort',
|
||||
unique=True
|
||||
)
|
||||
description = models.CharField(
|
||||
verbose_name=_('description'),
|
||||
@@ -78,8 +77,7 @@ class ConfigContext(SyncedDataMixin, CloningMixin, CustomLinksMixin, ChangeLogge
|
||||
name = models.CharField(
|
||||
verbose_name=_('name'),
|
||||
max_length=100,
|
||||
unique=True,
|
||||
db_collation='ci_natural_sort',
|
||||
unique=True
|
||||
)
|
||||
profile = models.ForeignKey(
|
||||
to='extras.ConfigContextProfile',
|
||||
|
||||
@@ -94,7 +94,6 @@ class CustomField(CloningMixin, ExportTemplatesMixin, ChangeLoggedModel):
|
||||
verbose_name=_('name'),
|
||||
max_length=50,
|
||||
unique=True,
|
||||
db_collation='ci_natural_sort',
|
||||
help_text=_('Internal field name'),
|
||||
validators=(
|
||||
RegexValidator(
|
||||
@@ -536,6 +535,15 @@ class CustomField(CloningMixin, ExportTemplatesMixin, ChangeLoggedModel):
|
||||
# URL
|
||||
elif self.type == CustomFieldTypeChoices.TYPE_URL:
|
||||
field = LaxURLField(assume_scheme='https', required=required, initial=initial)
|
||||
if self.validation_regex:
|
||||
field.validators = [
|
||||
RegexValidator(
|
||||
regex=self.validation_regex,
|
||||
message=mark_safe(_("Values must match this regex: <code>{regex}</code>").format(
|
||||
regex=escape(self.validation_regex)
|
||||
))
|
||||
)
|
||||
]
|
||||
|
||||
# JSON
|
||||
elif self.type == CustomFieldTypeChoices.TYPE_JSON:
|
||||
@@ -685,6 +693,13 @@ class CustomField(CloningMixin, ExportTemplatesMixin, ChangeLoggedModel):
|
||||
if self.validation_regex and not re.match(self.validation_regex, value):
|
||||
raise ValidationError(_("Value must match regex '{regex}'").format(regex=self.validation_regex))
|
||||
|
||||
# Validate URL field
|
||||
elif self.type == CustomFieldTypeChoices.TYPE_URL:
|
||||
if type(value) is not str:
|
||||
raise ValidationError(_("Value must be a string."))
|
||||
if self.validation_regex and not re.match(self.validation_regex, value):
|
||||
raise ValidationError(_("Value must match regex '{regex}'").format(regex=self.validation_regex))
|
||||
|
||||
# Validate integer
|
||||
elif self.type == CustomFieldTypeChoices.TYPE_INTEGER:
|
||||
if type(value) is not int:
|
||||
@@ -780,8 +795,7 @@ class CustomFieldChoiceSet(CloningMixin, ExportTemplatesMixin, ChangeLoggedModel
|
||||
"""
|
||||
name = models.CharField(
|
||||
max_length=100,
|
||||
unique=True,
|
||||
db_collation='ci_natural_sort',
|
||||
unique=True
|
||||
)
|
||||
description = models.CharField(
|
||||
max_length=200,
|
||||
|
||||
@@ -59,8 +59,7 @@ class EventRule(CustomFieldsMixin, ExportTemplatesMixin, TagsMixin, ChangeLogged
|
||||
name = models.CharField(
|
||||
verbose_name=_('name'),
|
||||
max_length=150,
|
||||
unique=True,
|
||||
db_collation='ci_natural_sort',
|
||||
unique=True
|
||||
)
|
||||
description = models.CharField(
|
||||
verbose_name=_('description'),
|
||||
@@ -165,8 +164,7 @@ class Webhook(CustomFieldsMixin, ExportTemplatesMixin, TagsMixin, ChangeLoggedMo
|
||||
name = models.CharField(
|
||||
verbose_name=_('name'),
|
||||
max_length=150,
|
||||
unique=True,
|
||||
db_collation='ci_natural_sort',
|
||||
unique=True
|
||||
)
|
||||
description = models.CharField(
|
||||
verbose_name=_('description'),
|
||||
@@ -309,8 +307,7 @@ class CustomLink(CloningMixin, ExportTemplatesMixin, ChangeLoggedModel):
|
||||
name = models.CharField(
|
||||
verbose_name=_('name'),
|
||||
max_length=100,
|
||||
unique=True,
|
||||
db_collation='ci_natural_sort',
|
||||
unique=True
|
||||
)
|
||||
enabled = models.BooleanField(
|
||||
verbose_name=_('enabled'),
|
||||
@@ -471,14 +468,12 @@ class SavedFilter(CloningMixin, ExportTemplatesMixin, ChangeLoggedModel):
|
||||
name = models.CharField(
|
||||
verbose_name=_('name'),
|
||||
max_length=100,
|
||||
unique=True,
|
||||
db_collation='ci_natural_sort',
|
||||
unique=True
|
||||
)
|
||||
slug = models.SlugField(
|
||||
verbose_name=_('slug'),
|
||||
max_length=100,
|
||||
unique=True,
|
||||
db_collation='case_insensitive',
|
||||
unique=True
|
||||
)
|
||||
description = models.CharField(
|
||||
verbose_name=_('description'),
|
||||
|
||||
@@ -125,8 +125,7 @@ class NotificationGroup(ChangeLoggedModel):
|
||||
name = models.CharField(
|
||||
verbose_name=_('name'),
|
||||
max_length=100,
|
||||
unique=True,
|
||||
db_collation='ci_natural_sort',
|
||||
unique=True
|
||||
)
|
||||
description = models.CharField(
|
||||
verbose_name=_('description'),
|
||||
|
||||
@@ -2,7 +2,7 @@ from django.conf import settings
|
||||
from django.db import models
|
||||
from django.urls import reverse
|
||||
from django.utils.text import slugify
|
||||
from django.utils.translation import gettext_lazy as _, pgettext_lazy
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from taggit.models import TagBase, GenericTaggedItemBase
|
||||
|
||||
from netbox.choices import ColorChoices
|
||||
@@ -25,21 +25,6 @@ class Tag(CloningMixin, ExportTemplatesMixin, ChangeLoggedModel, TagBase):
|
||||
id = models.BigAutoField(
|
||||
primary_key=True
|
||||
)
|
||||
# Override TagBase.name to set db_collation
|
||||
name = models.CharField(
|
||||
verbose_name=pgettext_lazy("A tag name", "name"),
|
||||
unique=True,
|
||||
max_length=100,
|
||||
db_collation='ci_natural_sort',
|
||||
)
|
||||
# Override TagBase.slug to set db_collation
|
||||
slug = models.SlugField(
|
||||
verbose_name=pgettext_lazy("A tag slug", "slug"),
|
||||
unique=True,
|
||||
max_length=100,
|
||||
allow_unicode=True,
|
||||
db_collation='case_insensitive',
|
||||
)
|
||||
color = ColorField(
|
||||
verbose_name=_('color'),
|
||||
default=ColorChoices.COLOR_GREY
|
||||
|
||||
@@ -1,9 +1,12 @@
|
||||
import inspect
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
|
||||
import yaml
|
||||
from django import forms
|
||||
from django.conf import settings
|
||||
from django.core.files.storage import storages
|
||||
from django.core.validators import RegexValidator
|
||||
from django.utils import timezone
|
||||
@@ -487,7 +490,7 @@ class BaseScript:
|
||||
if self.fieldsets:
|
||||
fieldsets.extend(self.fieldsets)
|
||||
else:
|
||||
fields = list(name for name, __ in self._get_vars().items())
|
||||
fields = list(name for name, _ in self._get_vars().items())
|
||||
fieldsets.append((_('Script Data'), fields))
|
||||
|
||||
# Append the default fieldset if defined in the Meta class
|
||||
@@ -579,6 +582,40 @@ class BaseScript:
|
||||
self._log(message, obj, level=LogLevelChoices.LOG_FAILURE)
|
||||
self.failed = True
|
||||
|
||||
#
|
||||
# Convenience functions
|
||||
#
|
||||
|
||||
def load_yaml(self, filename):
|
||||
"""
|
||||
Return data from a YAML file
|
||||
"""
|
||||
# TODO: DEPRECATED: Remove this method in v4.5
|
||||
self._log(
|
||||
_("load_yaml is deprecated and will be removed in v4.5"),
|
||||
level=LogLevelChoices.LOG_WARNING
|
||||
)
|
||||
file_path = os.path.join(settings.SCRIPTS_ROOT, filename)
|
||||
with open(file_path, 'r') as datafile:
|
||||
data = yaml.load(datafile, Loader=yaml.SafeLoader)
|
||||
|
||||
return data
|
||||
|
||||
def load_json(self, filename):
|
||||
"""
|
||||
Return data from a JSON file
|
||||
"""
|
||||
# TODO: DEPRECATED: Remove this method in v4.5
|
||||
self._log(
|
||||
_("load_json is deprecated and will be removed in v4.5"),
|
||||
level=LogLevelChoices.LOG_WARNING
|
||||
)
|
||||
file_path = os.path.join(settings.SCRIPTS_ROOT, filename)
|
||||
with open(file_path, 'r') as datafile:
|
||||
data = json.load(datafile)
|
||||
|
||||
return data
|
||||
|
||||
#
|
||||
# Legacy Report functionality
|
||||
#
|
||||
|
||||
@@ -12,8 +12,7 @@ from dcim.models import Device, DeviceRole, DeviceType, Manufacturer, Rack, Loca
|
||||
from extras.choices import *
|
||||
from extras.models import *
|
||||
from extras.scripts import BooleanVar, IntegerVar, Script as PythonClass, StringVar
|
||||
from users.constants import TOKEN_PREFIX
|
||||
from users.models import Group, Token, User
|
||||
from users.models import Group, User
|
||||
from utilities.testing import APITestCase, APIViewTestCases
|
||||
|
||||
|
||||
@@ -856,61 +855,20 @@ class ConfigTemplateTest(APIViewTestCases.APIViewTestCase):
|
||||
)
|
||||
ConfigTemplate.objects.bulk_create(config_templates)
|
||||
|
||||
def test_render(self):
|
||||
configtemplate = ConfigTemplate.objects.first()
|
||||
|
||||
self.add_permissions('extras.render_configtemplate', 'extras.view_configtemplate')
|
||||
url = reverse('extras-api:configtemplate-render', kwargs={'pk': configtemplate.pk})
|
||||
response = self.client.post(url, {'foo': 'bar'}, format='json', **self.header)
|
||||
self.assertHttpStatus(response, status.HTTP_200_OK)
|
||||
self.assertEqual(response.data['content'], 'Foo: bar')
|
||||
|
||||
def test_render_without_permission(self):
|
||||
configtemplate = ConfigTemplate.objects.first()
|
||||
|
||||
# No permissions added - user has no render permission
|
||||
url = reverse('extras-api:configtemplate-render', kwargs={'pk': configtemplate.pk})
|
||||
response = self.client.post(url, {'foo': 'bar'}, format='json', **self.header)
|
||||
self.assertHttpStatus(response, status.HTTP_404_NOT_FOUND)
|
||||
|
||||
def test_render_token_write_enabled(self):
|
||||
configtemplate = ConfigTemplate.objects.first()
|
||||
|
||||
self.add_permissions('extras.render_configtemplate', 'extras.view_configtemplate')
|
||||
url = reverse('extras-api:configtemplate-render', kwargs={'pk': configtemplate.pk})
|
||||
|
||||
# Request without token auth should fail with PermissionDenied
|
||||
response = self.client.post(url, {'foo': 'bar'}, format='json')
|
||||
self.assertHttpStatus(response, status.HTTP_403_FORBIDDEN)
|
||||
|
||||
# Create token with write_enabled=False
|
||||
token = Token.objects.create(version=2, user=self.user, write_enabled=False)
|
||||
token_header = f'Bearer {TOKEN_PREFIX}{token.key}.{token.token}'
|
||||
|
||||
# Request with write-disabled token should fail
|
||||
response = self.client.post(url, {'foo': 'bar'}, format='json', HTTP_AUTHORIZATION=token_header)
|
||||
self.assertHttpStatus(response, status.HTTP_403_FORBIDDEN)
|
||||
|
||||
# Enable write and retry
|
||||
token.write_enabled = True
|
||||
token.save()
|
||||
response = self.client.post(url, {'foo': 'bar'}, format='json', HTTP_AUTHORIZATION=token_header)
|
||||
self.assertHttpStatus(response, status.HTTP_200_OK)
|
||||
|
||||
|
||||
class ScriptTest(APITestCase):
|
||||
|
||||
class TestScriptClass(PythonClass):
|
||||
|
||||
class Meta:
|
||||
name = "Test script"
|
||||
name = 'Test script'
|
||||
commit = True
|
||||
scheduling_enabled = True
|
||||
|
||||
var1 = StringVar()
|
||||
var2 = IntegerVar()
|
||||
var3 = BooleanVar()
|
||||
|
||||
def run(self, data, commit=True):
|
||||
|
||||
self.log_info(data['var1'])
|
||||
self.log_success(data['var2'])
|
||||
self.log_failure(data['var3'])
|
||||
@@ -921,14 +879,16 @@ class ScriptTest(APITestCase):
|
||||
def setUpTestData(cls):
|
||||
module = ScriptModule.objects.create(
|
||||
file_root=ManagedFileRootPathChoices.SCRIPTS,
|
||||
file_path='/var/tmp/script.py'
|
||||
file_path='script.py',
|
||||
)
|
||||
Script.objects.create(
|
||||
script = Script.objects.create(
|
||||
module=module,
|
||||
name="Test script",
|
||||
name='Test script',
|
||||
is_executable=True,
|
||||
)
|
||||
cls.url = reverse('extras-api:script-detail', kwargs={'pk': script.pk})
|
||||
|
||||
@property
|
||||
def python_class(self):
|
||||
return self.TestScriptClass
|
||||
|
||||
@@ -941,7 +901,7 @@ class ScriptTest(APITestCase):
|
||||
def test_get_script(self):
|
||||
module = ScriptModule.objects.get(
|
||||
file_root=ManagedFileRootPathChoices.SCRIPTS,
|
||||
file_path='/var/tmp/script.py'
|
||||
file_path='script.py',
|
||||
)
|
||||
script = module.scripts.all().first()
|
||||
url = reverse('extras-api:script-detail', kwargs={'pk': script.pk})
|
||||
@@ -952,6 +912,76 @@ class ScriptTest(APITestCase):
|
||||
self.assertEqual(response.data['vars']['var2'], 'IntegerVar')
|
||||
self.assertEqual(response.data['vars']['var3'], 'BooleanVar')
|
||||
|
||||
def test_schedule_script_past_time_rejected(self):
|
||||
"""
|
||||
Scheduling with past schedule_at should fail.
|
||||
"""
|
||||
self.add_permissions('extras.run_script')
|
||||
|
||||
payload = {
|
||||
'data': {'var1': 'hello', 'var2': 1, 'var3': False},
|
||||
'commit': True,
|
||||
'schedule_at': now() - datetime.timedelta(hours=1),
|
||||
}
|
||||
response = self.client.post(self.url, payload, format='json', **self.header)
|
||||
|
||||
self.assertHttpStatus(response, status.HTTP_400_BAD_REQUEST)
|
||||
self.assertIn('schedule_at', response.data)
|
||||
# Be tolerant of exact wording but ensure we failed on schedule_at being in the past
|
||||
self.assertIn('future', str(response.data['schedule_at']).lower())
|
||||
|
||||
def test_schedule_script_interval_only(self):
|
||||
"""
|
||||
Interval without schedule_at should auto-set schedule_at now.
|
||||
"""
|
||||
self.add_permissions('extras.run_script')
|
||||
|
||||
payload = {
|
||||
'data': {'var1': 'hello', 'var2': 1, 'var3': False},
|
||||
'commit': True,
|
||||
'interval': 60,
|
||||
}
|
||||
response = self.client.post(self.url, payload, format='json', **self.header)
|
||||
|
||||
self.assertHttpStatus(response, status.HTTP_200_OK)
|
||||
# The latest job is returned in the script detail serializer under "result"
|
||||
self.assertIn('result', response.data)
|
||||
self.assertEqual(response.data['result']['interval'], 60)
|
||||
# Ensure a start time was autopopulated
|
||||
self.assertIsNotNone(response.data['result']['scheduled'])
|
||||
|
||||
def test_schedule_script_when_disabled(self):
|
||||
"""
|
||||
Scheduling should fail when script.scheduling_enabled=False.
|
||||
"""
|
||||
self.add_permissions('extras.run_script')
|
||||
|
||||
# Temporarily disable scheduling on the in-test Python class
|
||||
original = getattr(self.TestScriptClass.Meta, 'scheduling_enabled', True)
|
||||
self.TestScriptClass.Meta.scheduling_enabled = False
|
||||
base = {
|
||||
'data': {'var1': 'hello', 'var2': 1, 'var3': False},
|
||||
'commit': True,
|
||||
}
|
||||
# Check both schedule_at and interval paths
|
||||
cases = [
|
||||
{**base, 'schedule_at': now() + datetime.timedelta(minutes=5)},
|
||||
{**base, 'interval': 60},
|
||||
]
|
||||
try:
|
||||
for case in cases:
|
||||
with self.subTest(case=list(case.keys())):
|
||||
response = self.client.post(self.url, case, format='json', **self.header)
|
||||
|
||||
self.assertHttpStatus(response, status.HTTP_400_BAD_REQUEST)
|
||||
# Error should be attached to whichever field we used
|
||||
key = 'schedule_at' if 'schedule_at' in case else 'interval'
|
||||
self.assertIn(key, response.data)
|
||||
self.assertIn('scheduling is not enabled', str(response.data[key]).lower())
|
||||
finally:
|
||||
# Restore the original setting for other tests
|
||||
self.TestScriptClass.Meta.scheduling_enabled = original
|
||||
|
||||
|
||||
class CreatedUpdatedFilterTest(APITestCase):
|
||||
|
||||
|
||||
@@ -1300,6 +1300,28 @@ class CustomFieldAPITest(APITestCase):
response = self.client.patch(url, data, format='json', **self.header)
self.assertHttpStatus(response, status.HTTP_200_OK)

def test_url_regex_validation(self):
"""
Test that validation_regex is applied to URL custom fields (fixes #20498).
"""
site2 = Site.objects.get(name='Site 2')
url = reverse('dcim-api:site-detail', kwargs={'pk': site2.pk})
self.add_permissions('dcim.change_site')

cf_url = CustomField.objects.get(name='url_field')
cf_url.validation_regex = r'^https://' # Require HTTPS
cf_url.save()

# Test invalid URL (http instead of https)
data = {'custom_fields': {'url_field': 'http://example.com'}}
response = self.client.patch(url, data, format='json', **self.header)
self.assertHttpStatus(response, status.HTTP_400_BAD_REQUEST)

# Test valid URL (https)
data = {'custom_fields': {'url_field': 'https://example.com'}}
response = self.client.patch(url, data, format='json', **self.header)
self.assertHttpStatus(response, status.HTTP_200_OK)

def test_uniqueness_validation(self):
# Create a unique custom field
cf_text = CustomField.objects.get(name='text_field')

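Editor's note: the regex behaviour the test above relies on, shown in isolation (the URL values are examples only):

```python
import re

validation_regex = r'^https://'
assert re.match(validation_regex, 'https://example.com')
assert not re.match(validation_regex, 'http://example.com')
```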
@@ -363,7 +363,7 @@ class EventRuleTest(APITestCase):
body = json.loads(request.body)
self.assertEqual(body['event'], 'created')
self.assertEqual(body['timestamp'], job.kwargs['timestamp'])
self.assertEqual(body['object_type'], 'dcim.site')
self.assertEqual(body['model'], 'site')
self.assertEqual(body['username'], 'testuser')
self.assertEqual(body['request_id'], str(request_id))
self.assertEqual(body['data']['name'], 'Site 1')

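Editor's note: the webhook body shape implied by the assertions above; the values here are illustrative placeholders, not captured output.

```python
example_body = {
    'event': 'created',
    'timestamp': '2025-01-01T00:00:00+00:00',
    'object_type': 'dcim.site',  # app_label.model natural key
    'model': 'site',             # added alongside object_type in this change
    'username': 'testuser',
    'request_id': '00000000-0000-0000-0000-000000000000',
    'data': {'name': 'Site 1'},
}
```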
@@ -1,3 +1,5 @@
|
||||
import logging
|
||||
import tempfile
|
||||
from datetime import date, datetime, timezone
|
||||
from decimal import Decimal
|
||||
|
||||
@@ -7,6 +9,7 @@ from netaddr import IPAddress, IPNetwork
|
||||
|
||||
from dcim.models import DeviceRole
|
||||
from extras.scripts import *
|
||||
from utilities.testing import disable_logging
|
||||
|
||||
CHOICES = (
|
||||
('ff0000', 'Red'),
|
||||
@@ -32,6 +35,35 @@ JSON_DATA = """
|
||||
"""
|
||||
|
||||
|
||||
class ScriptTest(TestCase):
|
||||
|
||||
def test_load_yaml(self):
|
||||
datafile = tempfile.NamedTemporaryFile()
|
||||
datafile.write(bytes(YAML_DATA, 'UTF-8'))
|
||||
datafile.seek(0)
|
||||
|
||||
with disable_logging(level=logging.WARNING):
|
||||
data = Script().load_yaml(datafile.name)
|
||||
self.assertEqual(data, {
|
||||
'Foo': 123,
|
||||
'Bar': 456,
|
||||
'Baz': ['A', 'B', 'C'],
|
||||
})
|
||||
|
||||
def test_load_json(self):
|
||||
datafile = tempfile.NamedTemporaryFile()
|
||||
datafile.write(bytes(JSON_DATA, 'UTF-8'))
|
||||
datafile.seek(0)
|
||||
|
||||
with disable_logging(level=logging.WARNING):
|
||||
data = Script().load_json(datafile.name)
|
||||
self.assertEqual(data, {
|
||||
'Foo': 123,
|
||||
'Bar': 456,
|
||||
'Baz': ['A', 'B', 'C'],
|
||||
})
|
||||
|
||||
|
||||
class ScriptVariablesTest(TestCase):
|
||||
|
||||
def test_stringvar(self):
|
||||
|
||||
@@ -1,11 +1,14 @@
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.urls import reverse
|
||||
from django.test import tag
|
||||
|
||||
from core.choices import ManagedFileRootPathChoices
|
||||
from core.events import *
|
||||
from core.models import ObjectType
|
||||
from dcim.models import DeviceType, Manufacturer, Site
|
||||
from extras.choices import *
|
||||
from extras.models import *
|
||||
from extras.scripts import Script as PythonClass, IntegerVar, BooleanVar
|
||||
from users.models import Group, User
|
||||
from utilities.testing import ViewTestCases, TestCase
|
||||
|
||||
@@ -897,3 +900,70 @@ class ScriptListViewTest(TestCase):
|
||||
response = self.client.get(url, {'embedded': 'true'})
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertTemplateUsed(response, 'extras/inc/script_list_content.html')
|
||||
|
||||
|
||||
class ScriptValidationErrorTest(TestCase):
|
||||
user_permissions = ['extras.view_script', 'extras.run_script']
|
||||
|
||||
class TestScriptMixin:
|
||||
bar = IntegerVar(min_value=0, max_value=30, default=30)
|
||||
|
||||
class TestScriptClass(TestScriptMixin, PythonClass):
|
||||
class Meta:
|
||||
name = 'Test script'
|
||||
commit_default = False
|
||||
fieldsets = (("Logging", ("debug_mode",)),)
|
||||
|
||||
debug_mode = BooleanVar(default=False)
|
||||
|
||||
def run(self, data, commit):
|
||||
return "Complete"
|
||||
|
||||
@classmethod
|
||||
def setUpTestData(cls):
|
||||
module = ScriptModule.objects.create(file_root=ManagedFileRootPathChoices.SCRIPTS, file_path='test_script.py')
|
||||
cls.script = Script.objects.create(module=module, name='Test script', is_executable=True)
|
||||
|
||||
def setUp(self):
|
||||
super().setUp()
|
||||
Script.python_class = property(lambda self: ScriptValidationErrorTest.TestScriptClass)
|
||||
|
||||
@tag('regression')
|
||||
def test_script_validation_error_displays_message(self):
|
||||
from unittest.mock import patch
|
||||
|
||||
url = reverse('extras:script', kwargs={'pk': self.script.pk})
|
||||
|
||||
with patch('extras.views.get_workers_for_queue', return_value=['worker']):
|
||||
response = self.client.post(url, {'debug_mode': 'true', '_commit': 'true'})
|
||||
|
||||
self.assertEqual(response.status_code, 200)
|
||||
messages = list(response.context['messages'])
|
||||
self.assertEqual(len(messages), 1)
|
||||
self.assertEqual(str(messages[0]), "bar: This field is required.")
|
||||
|
||||
@tag('regression')
|
||||
def test_script_validation_error_no_toast_for_fieldset_fields(self):
|
||||
from unittest.mock import patch, PropertyMock
|
||||
|
||||
class FieldsetScript(PythonClass):
|
||||
class Meta:
|
||||
name = 'Fieldset test'
|
||||
commit_default = False
|
||||
fieldsets = (("Fields", ("required_field",)),)
|
||||
|
||||
required_field = IntegerVar(min_value=10)
|
||||
|
||||
def run(self, data, commit):
|
||||
return "Complete"
|
||||
|
||||
url = reverse('extras:script', kwargs={'pk': self.script.pk})
|
||||
|
||||
with patch.object(Script, 'python_class', new_callable=PropertyMock) as mock_python_class:
|
||||
mock_python_class.return_value = FieldsetScript
|
||||
with patch('extras.views.get_workers_for_queue', return_value=['worker']):
|
||||
response = self.client.post(url, {'required_field': '5', '_commit': 'true'})
|
||||
|
||||
self.assertEqual(response.status_code, 200)
|
||||
messages = list(response.context['messages'])
|
||||
self.assertEqual(len(messages), 0)
|
||||
|
||||
@@ -4,7 +4,7 @@ from django.contrib.auth.mixins import LoginRequiredMixin
from django.contrib.contenttypes.models import ContentType
from django.core.paginator import EmptyPage
from django.db.models import Count, Q
from django.http import HttpResponseBadRequest, HttpResponseForbidden, HttpResponse
from django.http import HttpResponseBadRequest, HttpResponseForbidden, HttpResponse, Http404
from django.shortcuts import get_object_or_404, redirect, render
from django.urls import reverse
from django.utils import timezone
@@ -25,7 +25,7 @@ from netbox.object_actions import *
from netbox.views import generic
from netbox.views.generic.mixins import TableMixin
from utilities.forms import ConfirmationForm, get_field_value
from utilities.htmx import htmx_partial
from utilities.htmx import htmx_partial, htmx_maybe_redirect_current_page
from utilities.paginator import EnhancedPaginator, get_paginate_count
from utilities.query import count_related
from utilities.querydict import normalize_querydict
@@ -101,6 +101,7 @@ class CustomFieldBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(CustomField, 'bulk_rename', path='rename', detail=False)
|
||||
class CustomFieldBulkRenameView(generic.BulkRenameView):
|
||||
queryset = CustomField.objects.all()
|
||||
filterset = filtersets.CustomFieldFilterSet
|
||||
|
||||
|
||||
@register_model_view(CustomField, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -175,6 +176,7 @@ class CustomFieldChoiceSetBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(CustomFieldChoiceSet, 'bulk_rename', path='rename', detail=False)
|
||||
class CustomFieldChoiceSetBulkRenameView(generic.BulkRenameView):
|
||||
queryset = CustomFieldChoiceSet.objects.all()
|
||||
filterset = filtersets.CustomFieldChoiceSetFilterSet
|
||||
|
||||
|
||||
@register_model_view(CustomFieldChoiceSet, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -230,6 +232,7 @@ class CustomLinkBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(CustomLink, 'bulk_rename', path='rename', detail=False)
|
||||
class CustomLinkBulkRenameView(generic.BulkRenameView):
|
||||
queryset = CustomLink.objects.all()
|
||||
filterset = filtersets.CustomLinkFilterSet
|
||||
|
||||
|
||||
@register_model_view(CustomLink, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -286,6 +289,7 @@ class ExportTemplateBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(ExportTemplate, 'bulk_rename', path='rename', detail=False)
|
||||
class ExportTemplateBulkRenameView(generic.BulkRenameView):
|
||||
queryset = ExportTemplate.objects.all()
|
||||
filterset = filtersets.ExportTemplateFilterSet
|
||||
|
||||
|
||||
@register_model_view(ExportTemplate, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -351,6 +355,7 @@ class SavedFilterBulkEditView(SharedObjectViewMixin, generic.BulkEditView):
|
||||
@register_model_view(SavedFilter, 'bulk_rename', path='rename', detail=False)
|
||||
class SavedFilterBulkRenameView(generic.BulkRenameView):
|
||||
queryset = SavedFilter.objects.all()
|
||||
filterset = filtersets.SavedFilterFilterSet
|
||||
|
||||
|
||||
@register_model_view(SavedFilter, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -413,6 +418,7 @@ class TableConfigBulkEditView(SharedObjectViewMixin, generic.BulkEditView):
|
||||
@register_model_view(TableConfig, 'bulk_rename', path='rename', detail=False)
|
||||
class TableConfigBulkRenameView(generic.BulkRenameView):
|
||||
queryset = TableConfig.objects.all()
|
||||
filterset = filtersets.TableConfigFilterSet
|
||||
|
||||
|
||||
@register_model_view(TableConfig, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -499,6 +505,7 @@ class NotificationGroupBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(NotificationGroup, 'bulk_rename', path='rename', detail=False)
|
||||
class NotificationGroupBulkRenameView(generic.BulkRenameView):
|
||||
queryset = NotificationGroup.objects.all()
|
||||
filterset = filtersets.NotificationGroupFilterSet
|
||||
|
||||
|
||||
@register_model_view(NotificationGroup, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -518,8 +525,9 @@ class NotificationsView(LoginRequiredMixin, View):
"""
def get(self, request):
return render(request, 'htmx/notifications.html', {
'notifications': request.user.notifications.unread(),
'notifications': request.user.notifications.unread()[:10],
'total_count': request.user.notifications.count(),
'unread_count': request.user.notifications.unread().count(),
})


@@ -528,6 +536,7 @@ class NotificationReadView(LoginRequiredMixin, View):
"""
Mark the Notification read and redirect the user to its attached object.
"""

def get(self, request, pk):
# Mark the Notification as read
notification = get_object_or_404(request.user.notifications, pk=pk)
@@ -541,18 +550,48 @@ class NotificationReadView(LoginRequiredMixin, View):
return redirect('account:notifications')


@register_model_view(Notification, name='dismiss_all', path='dismiss-all', detail=False)
class NotificationDismissAllView(LoginRequiredMixin, View):
"""
Convenience view to clear all *unread* notifications for the current user.
"""

def get(self, request):
request.user.notifications.unread().delete()
if htmx_partial(request):
# If a user is currently on the notification page, redirect there (full repaint)
redirect_resp = htmx_maybe_redirect_current_page(request, 'account:notifications', preserve_query=True)
if redirect_resp:
return redirect_resp

return render(request, 'htmx/notifications.html', {
'notifications': request.user.notifications.unread()[:10],
'total_count': request.user.notifications.count(),
'unread_count': request.user.notifications.unread().count(),
})
return redirect('account:notifications')


@register_model_view(Notification, 'dismiss')
class NotificationDismissView(LoginRequiredMixin, View):
"""
A convenience view which allows deleting notifications with one click.
"""

def get(self, request, pk):
notification = get_object_or_404(request.user.notifications, pk=pk)
notification.delete()

if htmx_partial(request):
# If a user is currently on the notification page, redirect there (full repaint)
redirect_resp = htmx_maybe_redirect_current_page(request, 'account:notifications', preserve_query=True)
if redirect_resp:
return redirect_resp

return render(request, 'htmx/notifications.html', {
'notifications': request.user.notifications.unread()[:10],
'total_count': request.user.notifications.count(),
'unread_count': request.user.notifications.unread().count(),
})

return redirect('account:notifications')
@@ -650,6 +689,7 @@ class WebhookBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(Webhook, 'bulk_rename', path='rename', detail=False)
|
||||
class WebhookBulkRenameView(generic.BulkRenameView):
|
||||
queryset = Webhook.objects.all()
|
||||
filterset = filtersets.WebhookFilterSet
|
||||
|
||||
|
||||
@register_model_view(Webhook, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -705,6 +745,7 @@ class EventRuleBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(EventRule, 'bulk_rename', path='rename', detail=False)
|
||||
class EventRuleBulkRenameView(generic.BulkRenameView):
|
||||
queryset = EventRule.objects.all()
|
||||
filterset = filtersets.EventRuleFilterSet
|
||||
|
||||
|
||||
@register_model_view(EventRule, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -841,6 +882,7 @@ class ConfigContextProfileBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(ConfigContextProfile, 'bulk_rename', path='rename', detail=False)
|
||||
class ConfigContextProfileBulkRenameView(generic.BulkRenameView):
|
||||
queryset = ConfigContextProfile.objects.all()
|
||||
filterset = filtersets.ConfigContextProfileFilterSet
|
||||
|
||||
|
||||
@register_model_view(ConfigContextProfile, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -929,6 +971,7 @@ class ConfigContextBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(ConfigContext, 'bulk_rename', path='rename', detail=False)
|
||||
class ConfigContextBulkRenameView(generic.BulkRenameView):
|
||||
queryset = ConfigContext.objects.all()
|
||||
filterset = filtersets.ConfigContextFilterSet
|
||||
|
||||
|
||||
@register_model_view(ConfigContext, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -1020,6 +1063,7 @@ class ConfigTemplateBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(ConfigTemplate, 'bulk_rename', path='rename', detail=False)
|
||||
class ConfigTemplateBulkRenameView(generic.BulkRenameView):
|
||||
queryset = ConfigTemplate.objects.all()
|
||||
filterset = filtersets.ConfigTemplateFilterSet
|
||||
|
||||
|
||||
@register_model_view(ConfigTemplate, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -1143,6 +1187,7 @@ class ImageAttachmentBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(ImageAttachment, 'bulk_rename', path='rename', detail=False)
|
||||
class ImageAttachmentBulkRenameView(generic.BulkRenameView):
|
||||
queryset = ImageAttachment.objects.all()
|
||||
filterset = filtersets.ImageAttachmentFilterSet
|
||||
|
||||
|
||||
@register_model_view(ImageAttachment, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -1485,6 +1530,15 @@ class ScriptView(BaseScriptView):
)

return redirect('extras:script_result', job_pk=job.pk)
else:
fieldset_fields = {field for _, fields in script_class.get_fieldsets() for field in fields}
hidden_errors = {
field: errors for field, errors in form.errors.items()
if field not in fieldset_fields
}
if hidden_errors:
error_msg = '; '.join(f"{field}: {', '.join(errors)}" for field, errors in hidden_errors.items())
messages.error(request, error_msg)

return render(request, 'extras/script.html', {
'object': script,

@@ -52,6 +52,7 @@ def send_webhook(event_rule, object_type, event_type, data, timestamp, username,
'event': WEBHOOK_EVENT_TYPES.get(event_type, event_type),
'timestamp': timestamp,
'object_type': '.'.join(object_type.natural_key()),
'model': object_type.model,
'username': username,
'request_id': request.id if request else None,
'data': data,
@@ -99,7 +100,7 @@ def send_webhook(event_rule, object_type, event_type, data, timestamp, username,
'data': body.encode('utf8'),
}
logger.info(
f"Sending {params['method']} request to {params['url']} ({context['object_type']} {context['event']})"
f"Sending {params['method']} request to {params['url']} ({context['model']} {context['event']})"
)
logger.debug(params)
try:

@@ -369,6 +369,20 @@ class IPAddressImportForm(NetBoxModelImportForm):
                 **{f"virtual_machine__{self.fields['virtual_machine'].to_field_name}": data['virtual_machine']}
             )

+    def clean_is_primary(self):
+        # Make sure is_primary is None when it's not included in the uploaded data
+        if 'is_primary' not in self.data:
+            return None
+        else:
+            return self.cleaned_data['is_primary']
+
+    def clean_is_oob(self):
+        # Make sure is_oob is None when it's not included in the uploaded data
+        if 'is_oob' not in self.data:
+            return None
+        else:
+            return self.cleaned_data['is_oob']
+
    def clean(self):
        super().clean()

@@ -411,18 +425,18 @@ class IPAddressImportForm(NetBoxModelImportForm):
        ipaddress = super().save(*args, **kwargs)

        # Set as primary for device/VM
-        if self.cleaned_data.get('is_primary'):
+        if self.cleaned_data.get('is_primary') is not None:
            parent = self.cleaned_data.get('device') or self.cleaned_data.get('virtual_machine')
            if self.instance.address.version == 4:
-                parent.primary_ip4 = ipaddress
+                parent.primary_ip4 = ipaddress if self.cleaned_data.get('is_primary') else None
            elif self.instance.address.version == 6:
-                parent.primary_ip6 = ipaddress
+                parent.primary_ip6 = ipaddress if self.cleaned_data.get('is_primary') else None
            parent.save()

        # Set as OOB for device
-        if self.cleaned_data.get('is_oob'):
+        if self.cleaned_data.get('is_oob') is not None:
            parent = self.cleaned_data.get('device')
-            parent.oob_ip = ipaddress
+            parent.oob_ip = ipaddress if self.cleaned_data.get('is_oob') else None
            parent.save()

        return ipaddress
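Together, the import-form hunks make is_primary and is_oob tri-state: a column omitted from the uploaded data yields None and leaves the parent device or VM untouched, an explicit true assigns the address, and an explicit false now clears the parent's primary/OOB IP rather than silently doing nothing. A minimal sketch of that pattern, with a plain dict standing in for the form's uploaded data (not NetBox code):

    # Illustrative sketch only: None = leave the parent alone, True = set, False = clear.
    def resolve_flag(row: dict, column: str):
        if column not in row:
            return None
        return bool(row[column])

    for row in ({'address': '10.0.0.1/24'}, {'address': '10.0.0.1/24', 'is_primary': False}):
        print(resolve_flag(row, 'is_primary'))  # None, then False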
@@ -79,12 +79,36 @@ class ASNRangeFilter(TenancyFilterMixin, OrganizationalModelFilterMixin):

 @strawberry_django.filter_type(models.Aggregate, lookups=True)
 class AggregateFilter(ContactFilterMixin, TenancyFilterMixin, PrimaryModelFilterMixin):
-    prefix: Annotated['PrefixFilter', strawberry.lazy('ipam.graphql.filters')] | None = strawberry_django.filter_field()
-    prefix_id: ID | None = strawberry_django.filter_field()
+    prefix: FilterLookup[str] | None = strawberry_django.filter_field()
     rir: Annotated['RIRFilter', strawberry.lazy('ipam.graphql.filters')] | None = strawberry_django.filter_field()
     rir_id: ID | None = strawberry_django.filter_field()
     date_added: DateFilterLookup[date] | None = strawberry_django.filter_field()
+
+    @strawberry_django.filter_field()
+    def contains(self, value: list[str], prefix) -> Q:
+        """
+        Return aggregates whose `prefix` contains any of the supplied networks.
+        Mirrors PrefixFilter.contains but operates on the Aggregate.prefix field itself.
+        """
+        if not value:
+            return Q()
+        q = Q()
+        for subnet in value:
+            try:
+                query = str(netaddr.IPNetwork(subnet.strip()).cidr)
+            except (AddrFormatError, ValueError):
+                continue
+            q |= Q(**{f"{prefix}prefix__net_contains": query})
+        return q
+
+    @strawberry_django.filter_field()
+    def family(
+        self,
+        value: Annotated['IPAddressFamilyEnum', strawberry.lazy('ipam.graphql.enums')],
+        prefix,
+    ) -> Q:
+        return Q(**{f"{prefix}prefix__family": value.value})


 @strawberry_django.filter_type(models.FHRPGroup, lookups=True)
 class FHRPGroupFilter(PrimaryModelFilterMixin):
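In the GraphQL hunk above, the Aggregate filter drops the prefix/prefix_id relation lookups (Aggregate.prefix is a network value, not a foreign key to Prefix) in favour of a plain string lookup plus contains and family methods, and each method builds its lookup through the prefix argument supplied by strawberry-django so it also works when nested under a related filter. A sketch of only the prefix-aware Q construction, outside NetBox:

    from django.db.models import Q

    # Illustrative sketch only: with prefix='' the lookup targets the model itself;
    # with prefix='aggregate__' the same helper can be reused from a related filter.
    def net_contains(value: str, prefix: str = '') -> Q:
        return Q(**{f"{prefix}prefix__net_contains": value})

    print(net_contains('192.0.2.0/24'))
    print(net_contains('192.0.2.0/24', prefix='aggregate__'))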
@@ -119,28 +143,28 @@ class FHRPGroupAssignmentFilter(BaseObjectTypeFilterMixin, ChangeLogFilterMixin)
     )

     @strawberry_django.filter_field()
-    def device_id(self, queryset, value: list[str], prefix) -> Q:
-        return self.filter_device('id', value)
+    def device_id(self, value: list[str], prefix) -> Q:
+        return self.filter_device('id', value, prefix)

     @strawberry_django.filter_field()
     def device(self, value: list[str], prefix) -> Q:
-        return self.filter_device('name', value)
+        return self.filter_device('name', value, prefix)

     @strawberry_django.filter_field()
     def virtual_machine_id(self, value: list[str], prefix) -> Q:
-        return Q(interface_id__in=VMInterface.objects.filter(virtual_machine_id__in=value))
+        return Q(**{f"{prefix}interface_id__in": VMInterface.objects.filter(virtual_machine_id__in=value)})

     @strawberry_django.filter_field()
     def virtual_machine(self, value: list[str], prefix) -> Q:
-        return Q(interface_id__in=VMInterface.objects.filter(virtual_machine__name__in=value))
+        return Q(**{f"{prefix}interface_id__in": VMInterface.objects.filter(virtual_machine__name__in=value)})

-    def filter_device(self, field, value) -> Q:
+    def filter_device(self, field, value, prefix) -> Q:
         """Helper to standardize logic for device and device_id filters"""
         devices = Device.objects.filter(**{f'{field}__in': value})
         interface_ids = []
         for device in devices:
             interface_ids.extend(device.vc_interfaces().values_list('id', flat=True))
-        return Q(interface_id__in=interface_ids)
+        return Q(**{f"{prefix}interface_id__in": interface_ids})


 @strawberry_django.filter_type(models.IPAddress, lookups=True)
@@ -170,7 +194,7 @@ class IPAddressFilter(ContactFilterMixin, TenancyFilterMixin, PrimaryModelFilter

     @strawberry_django.filter_field()
     def assigned(self, value: bool, prefix) -> Q:
-        return Q(assigned_object_id__isnull=(not value))
+        return Q(**{f"{prefix}assigned_object_id__isnull": not value})

     @strawberry_django.filter_field()
     def parent(self, value: list[str], prefix) -> Q:

@@ -180,9 +204,9 @@ class IPAddressFilter(ContactFilterMixin, TenancyFilterMixin, PrimaryModelFilter
         for subnet in value:
             try:
                 query = str(netaddr.IPNetwork(subnet.strip()).cidr)
-                q |= Q(address__net_host_contained=query)
             except (AddrFormatError, ValueError):
-                return Q()
+                continue
+            q |= Q(**{f"{prefix}address__net_host_contained": query})
         return q

     @strawberry_django.filter_field()
@@ -217,9 +241,14 @@ class IPRangeFilter(ContactFilterMixin, TenancyFilterMixin, PrimaryModelFilterMi
         for subnet in value:
             try:
                 query = str(netaddr.IPNetwork(subnet.strip()).cidr)
-                q |= Q(start_address__net_host_contained=query, end_address__net_host_contained=query)
             except (AddrFormatError, ValueError):
-                return Q()
+                continue
+            q |= Q(
+                **{
+                    f"{prefix}start_address__net_host_contained": query,
+                    f"{prefix}end_address__net_host_contained": query,
+                }
+            )
         return q

     @strawberry_django.filter_field()

@@ -228,10 +257,17 @@ class IPRangeFilter(ContactFilterMixin, TenancyFilterMixin, PrimaryModelFilterMi
             return Q()
         q = Q()
         for subnet in value:
-            net = netaddr.IPNetwork(subnet.strip())
+            try:
+                net = netaddr.IPNetwork(subnet.strip())
+                query_start = str(netaddr.IPAddress(net.first))
+                query_end = str(netaddr.IPAddress(net.last))
+            except (AddrFormatError, ValueError):
+                continue
             q |= Q(
-                start_address__host__inet__lte=str(netaddr.IPAddress(net.first)),
-                end_address__host__inet__gte=str(netaddr.IPAddress(net.last)),
+                **{
+                    f"{prefix}start_address__host__inet__lte": query_start,
+                    f"{prefix}end_address__host__inet__gte": query_end,
+                }
             )
         return q
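Both IPRange hunks above also change the error handling inside the loop: an unparsable subnet previously made the filter return an empty Q(), throwing away the lookups already accumulated from valid subnets; it is now skipped with continue. A small standalone sketch of the difference, using the same netaddr library the filters use (the helper name is assumed, not NetBox code):

    import netaddr

    # Illustrative sketch only: skipping a bad value keeps the results built from the good ones.
    def parse_subnets(values):
        parsed = []
        for raw in values:
            try:
                parsed.append(str(netaddr.IPNetwork(raw.strip()).cidr))
            except (netaddr.AddrFormatError, ValueError):
                continue  # the old filter code effectively bailed out here
        return parsed

    print(parse_subnets(['10.0.0.0/24', 'not-a-network', '2001:db8::/32']))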
@@ -257,10 +293,21 @@ class PrefixFilter(ContactFilterMixin, ScopedFilterMixin, TenancyFilterMixin, Pr
             return Q()
         q = Q()
         for subnet in value:
-            query = str(netaddr.IPNetwork(subnet.strip()).cidr)
-            q |= Q(prefix__net_contains=query)
+            try:
+                query = str(netaddr.IPNetwork(subnet.strip()).cidr)
+            except (AddrFormatError, ValueError):
+                continue
+            q |= Q(**{f"{prefix}prefix__net_contains": query})
         return q
+
+    @strawberry_django.filter_field()
+    def family(
+        self,
+        value: Annotated['IPAddressFamilyEnum', strawberry.lazy('ipam.graphql.enums')],
+        prefix,
+    ) -> Q:
+        return Q(**{f"{prefix}prefix__family": value.value})


 @strawberry_django.filter_type(models.RIR, lookups=True)
 class RIRFilter(OrganizationalModelFilterMixin):
@@ -1,100 +0,0 @@
-from django.db import migrations, models
-
-PATTERN_OPS_INDEXES = [
-    'ipam_asnrange_name_c7585e73_like',
-    'ipam_asnrange_slug_c8a7d8a1_like',
-    'ipam_rir_name_64a71982_like',
-    'ipam_rir_slug_ff1a369a_like',
-    'ipam_role_name_13784849_like',
-    'ipam_role_slug_309ca14c_like',
-    'ipam_routetarget_name_212be79f_like',
-    'ipam_servicetemplate_name_1a2f3410_like',
-    'ipam_vlangroup_slug_40abcf6b_like',
-    'ipam_vlantranslationpolicy_name_17e0a007_like',
-    'ipam_vrf_rd_0ac1bde1_like',
-]
-
-
-def remove_indexes(apps, schema_editor):
-    for idx in PATTERN_OPS_INDEXES:
-        schema_editor.execute(f'DROP INDEX IF EXISTS {idx}')
-
-
-class Migration(migrations.Migration):
-
-    dependencies = [
-        ('ipam', '0082_add_prefix_network_containment_indexes'),
-        ('dcim', '0217_ci_collations'),
-    ]
-
-    operations = [
-        migrations.RunPython(
-            code=remove_indexes,
-            reverse_code=migrations.RunPython.noop,
-        ),
-        migrations.AlterField(
-            model_name='asnrange',
-            name='name',
-            field=models.CharField(db_collation='ci_natural_sort', max_length=100, unique=True),
-        ),
-        migrations.AlterField(
-            model_name='asnrange',
-            name='slug',
-            field=models.SlugField(db_collation='case_insensitive', max_length=100, unique=True),
-        ),
-        migrations.AlterField(
-            model_name='rir',
-            name='name',
-            field=models.CharField(db_collation='ci_natural_sort', max_length=100, unique=True),
-        ),
-        migrations.AlterField(
-            model_name='rir',
-            name='slug',
-            field=models.SlugField(db_collation='case_insensitive', max_length=100, unique=True),
-        ),
-        migrations.AlterField(
-            model_name='role',
-            name='name',
-            field=models.CharField(db_collation='ci_natural_sort', max_length=100, unique=True),
-        ),
-        migrations.AlterField(
-            model_name='role',
-            name='slug',
-            field=models.SlugField(db_collation='case_insensitive', max_length=100, unique=True),
-        ),
-        migrations.AlterField(
-            model_name='routetarget',
-            name='name',
-            field=models.CharField(db_collation='ci_natural_sort', max_length=21, unique=True),
-        ),
-        migrations.AlterField(
-            model_name='servicetemplate',
-            name='name',
-            field=models.CharField(db_collation='ci_natural_sort', max_length=100, unique=True),
-        ),
-        migrations.AlterField(
-            model_name='vlan',
-            name='name',
-            field=models.CharField(db_collation='ci_natural_sort', max_length=64),
-        ),
-        migrations.AlterField(
-            model_name='vlangroup',
-            name='name',
-            field=models.CharField(db_collation='ci_natural_sort', max_length=100),
-        ),
-        migrations.AlterField(
-            model_name='vlangroup',
-            name='slug',
-            field=models.SlugField(db_collation='case_insensitive', max_length=100),
-        ),
-        migrations.AlterField(
-            model_name='vlantranslationpolicy',
-            name='name',
-            field=models.CharField(db_collation='ci_natural_sort', max_length=100, unique=True),
-        ),
-        migrations.AlterField(
-            model_name='vrf',
-            name='rd',
-            field=models.CharField(blank=True, db_collation='case_insensitive', max_length=21, null=True, unique=True),
-        ),
-    ]
@@ -0,0 +1,27 @@
+from django.db import migrations
+
+
+def populate_vlangroup_total_vlan_ids(apps, schema_editor):
+    VLANGroup = apps.get_model('ipam', 'VLANGroup')
+    db_alias = schema_editor.connection.alias
+
+    vlan_groups = VLANGroup.objects.using(db_alias).only('id', 'vid_ranges')
+    for group in vlan_groups:
+        total_vlan_ids = 0
+        if group.vid_ranges:
+            for r in group.vid_ranges:
+                # Half-open [lo, hi): length is (hi - lo).
+                if r is not None and r.lower is not None and r.upper is not None:
+                    total_vlan_ids += r.upper - r.lower
+        group._total_vlan_ids = total_vlan_ids
+    VLANGroup.objects.using(db_alias).bulk_update(vlan_groups, ['_total_vlan_ids'], batch_size=100)
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ('ipam', '0082_add_prefix_network_containment_indexes'),
+    ]
+
+    operations = [
+        migrations.RunPython(populate_vlangroup_total_vlan_ids, migrations.RunPython.noop),
+    ]
@@ -18,7 +18,12 @@ class ASNRange(OrganizationalModel):
         verbose_name=_('name'),
         max_length=100,
         unique=True,
-        db_collation='ci_natural_sort',
+        db_collation="natural_sort"
     )
+    slug = models.SlugField(
+        verbose_name=_('slug'),
+        max_length=100,
+        unique=True
+    )
     rir = models.ForeignKey(
         to='ipam.RIR',

@@ -50,8 +50,7 @@ class ServiceTemplate(ServiceBase, PrimaryModel):
     name = models.CharField(
         verbose_name=_('name'),
         max_length=100,
-        unique=True,
-        db_collation='ci_natural_sort',
+        unique=True
     )

     class Meta:
@@ -37,12 +37,11 @@ class VLANGroup(OrganizationalModel):
     name = models.CharField(
         verbose_name=_('name'),
         max_length=100,
-        db_collation='ci_natural_sort',
+        db_collation="natural_sort"
     )
     slug = models.SlugField(
         verbose_name=_('slug'),
-        max_length=100,
-        db_collation='case_insensitive',
+        max_length=100
     )
     scope_type = models.ForeignKey(
         to='contenttypes.ContentType',

@@ -133,7 +132,8 @@ class VLANGroup(OrganizationalModel):
     def save(self, *args, **kwargs):
         self._total_vlan_ids = 0
         for vid_range in self.vid_ranges:
-            self._total_vlan_ids += vid_range.upper - vid_range.lower + 1
+            # VID range is inclusive on lower-bound, exclusive on upper-bound
+            self._total_vlan_ids += vid_range.upper - vid_range.lower

         super().save(*args, **kwargs)

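The save() hunk above drops the "+ 1": stored VID ranges are half-open (inclusive lower bound, exclusive upper bound, as the new comment and the data migration earlier in this diff both note), so upper - lower already equals the number of usable VLAN IDs and the old formula over-counted every range by one. A one-line worked example:

    # A VID range entered as 1-4094 is stored half-open as [1, 4095).
    lower, upper = 1, 4095
    print(upper - lower + 1, upper - lower)  # old: 4095 (off by one), new: 4094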
@@ -215,8 +215,7 @@ class VLAN(PrimaryModel):
     )
     name = models.CharField(
         verbose_name=_('name'),
-        max_length=64,
-        db_collation='ci_natural_sort',
+        max_length=64
     )
     tenant = models.ForeignKey(
         to='tenancy.Tenant',

@@ -364,7 +363,6 @@ class VLANTranslationPolicy(PrimaryModel):
         verbose_name=_('name'),
         max_length=100,
         unique=True,
-        db_collation='ci_natural_sort',
     )

     class Meta:
@@ -19,12 +19,11 @@ class VRF(PrimaryModel):
     name = models.CharField(
         verbose_name=_('name'),
         max_length=100,
-        db_collation='natural_sort',
+        db_collation="natural_sort"
     )
     rd = models.CharField(
         max_length=VRF_RD_MAX_LENGTH,
         unique=True,
-        db_collation='case_insensitive',
         blank=True,
         null=True,
         verbose_name=_('route distinguisher'),

@@ -76,8 +75,8 @@ class RouteTarget(PrimaryModel):
         verbose_name=_('name'),
         max_length=VRF_RD_MAX_LENGTH,  # Same format options as VRF RD (RFC 4360 section 4)
         unique=True,
-        db_collation='ci_natural_sort',
         help_text=_('Route target value (formatted in accordance with RFC 4360)'),
+        db_collation="natural_sort"
     )
     tenant = models.ForeignKey(
         to='tenancy.Tenant',
@@ -3,7 +3,7 @@ from django.utils.translation import gettext_lazy as _

 from ipam.models import *
 from netbox.tables import NetBoxTable, columns
-from tenancy.tables import TenancyColumnsMixin
+from tenancy.tables import ContactsColumnMixin, TenancyColumnsMixin

 __all__ = (
     'ASNTable',

@@ -36,7 +36,7 @@ class ASNRangeTable(TenancyColumnsMixin, NetBoxTable):
         default_columns = ('pk', 'name', 'rir', 'start', 'end', 'tenant', 'asn_count', 'description')


-class ASNTable(TenancyColumnsMixin, NetBoxTable):
+class ASNTable(TenancyColumnsMixin, ContactsColumnMixin, NetBoxTable):
     asn = tables.Column(
         verbose_name=_('ASN'),
         linkify=True

@@ -76,7 +76,7 @@ class ASNTable(TenancyColumnsMixin, NetBoxTable):
         model = ASN
         fields = (
             'pk', 'asn', 'asn_asdot', 'rir', 'site_count', 'provider_count', 'tenant', 'tenant_group', 'description',
-            'comments', 'sites', 'tags', 'created', 'last_updated', 'actions',
+            'contacts', 'comments', 'sites', 'tags', 'created', 'last_updated', 'actions',
         )
         default_columns = (
             'pk', 'asn', 'rir', 'site_count', 'provider_count', 'sites', 'description', 'tenant',
Some files were not shown because too many files have changed in this diff.