Mirror of https://github.com/netbox-community/netbox.git
Synced 2026-02-11 03:17:42 +01:00

Compare commits: 93 commits
19720-maca ... 12318-case
Commit SHAs:

4f2f61c90d, a34553325e, 06052f8eaa, dac0a06f4f, 52d4498caf, 5bbab7eb47, 87505e0bb9, 7d82493052, 77c08b7bf9, adad7c2209,
8cc6589a35, bee0080917, 389c44e5d6, 9cb2c78e34, 2ae98f0353, addda0538f, c902a1c510, f23ee0a46f, b4acc3fb36, 5ad6bd88f6,
2bebfccf9b, b7cc4c418b, 37a9d03348, a69bbcf651, 2edfde5753, cfbd9632ac, c9386bc9c3, c826c5cdb0, a4ab4f885d, 61d77dff14,
24a83acc34, dbc71158ec, a91af996d5, f0523611d1, 7719b98697, f383067ecb, 20de263565, bb290dc792, fcdb7ff6c8, 5ceb6a60da,
33d4759871, 2abc5ac69a, f8c074045f, 4db3d488ad, b7cae04572, 51528ae429, d5e8480367, 18a308ae3a, 05e26b82c1, d8e4c95bcc,
faa89a53ff, c63e60a62b, d18bbe48c1, 99e367cbaf, f5ed095738, b70f1211ab, 10e8e7b071, c770e6b45d, 82db8a9c02, bb75bceec5,
9a68cde95f, 6c723dfb1a, 9b85d92ad0, 917a2c2618, 6388705e57, ac335c3d87, a54c508da2, d69042f26e, f6290dd7af, c094699dc0,
adce67a7cf, f82f084c02, 43fc7fb58a, 11099b01bb, 5dc48f3a88, 1ee23ba6fa, 23d7515b41, 12818f1786, f0ae0da1c7, c30e4813b7,
57a7afd548, b4eaeead13, 24fff6bd74, b9567208d4, cfcea7c941, 21ba27fb39, c0e4d1c1e3, d95eaa7ba2, 5506901867, ec9da88134,
e221f1fffa, 530dad279a, b1439dc298
@@ -15,7 +15,7 @@ body:
  attributes:
    label: NetBox version
    description: What version of NetBox are you currently running?
    placeholder: v4.4.2
    placeholder: v4.4.4
  validations:
    required: true
- type: dropdown

6 .github/ISSUE_TEMPLATE/02-bug_report.yaml vendored

@@ -27,7 +27,7 @@ body:
  attributes:
    label: NetBox Version
    description: What version of NetBox are you currently running?
    placeholder: v4.4.2
    placeholder: v4.4.4
  validations:
    required: true
- type: dropdown
@@ -35,9 +35,9 @@ body:
    label: Python Version
    description: What version of Python are you currently running?
    options:
      - "3.10"
      - "3.11"
      - "3.12"
      - "3.13"
      - "3.14"
  validations:
    required: true
- type: textarea

8 .github/codeql/codeql-config.yml vendored

@@ -1,3 +1,11 @@
paths-ignore:
  # Ignore compiled JS
  - netbox/project-static/dist

query-filters:
  # Exclude py/url-redirection: NetBox uses safe_for_redirect() wrapper function
  # which validates all redirects via Django's url_has_allowed_host_and_scheme().
  # CodeQL's taint tracking doesn't recognize wrapper functions without custom
  # query configuration. See #20484.
  - exclude:
      id: py/url-redirection

2 .github/workflows/ci.yml vendored

@@ -31,7 +31,7 @@ jobs:
      NETBOX_CONFIGURATION: netbox.configuration_testing
    strategy:
      matrix:
        python-version: ['3.10', '3.11', '3.12']
        python-version: ['3.12', '3.13']
        node-version: ['20.x']
    services:
      redis:
@@ -12,9 +12,7 @@ django-cors-headers

# Runtime UI tool for debugging Django
# https://github.com/jazzband/django-debug-toolbar/blob/main/docs/changes.rst
# django-debug-toolbar v6.0.0 raises "Attribute Error at /: 'function' object has no attribute 'set'"
# see https://github.com/netbox-community/netbox/issues/19974
django-debug-toolbar==5.2.0
django-debug-toolbar

# Library for writing reusable URL query filters
# https://github.com/carltongibson/django-filter/blob/main/CHANGES.rst
@@ -71,7 +69,8 @@ django-timezone-field

# A REST API framework for Django projects
# https://www.django-rest-framework.org/community/release-notes/
djangorestframework
# TODO: Re-evaluate the monkey-patch of get_unique_validators() before upgrading
djangorestframework==3.16.1

# Sane and flexible OpenAPI 3 schema generation for Django REST framework.
# https://github.com/tfranzel/drf-spectacular/blob/master/CHANGELOG.rst
@@ -167,7 +166,8 @@ strawberry-graphql-django
svgwrite

# Tabular dataset library (for table-based exports)
# https://github.com/jazzband/tablib/blob/master/HISTORY.md
# Current: https://github.com/jazzband/tablib/releases
# Previous: https://github.com/jazzband/tablib/blob/master/HISTORY.md
tablib

# Timezone data (required by django-timezone-field on Python 3.9+)

1295 contrib/openapi.json
File diff suppressed because it is too large
@@ -2,7 +2,7 @@

## Local Authentication

Local user accounts and groups can be created in NetBox under the "Authentication" section in the "Admin" menu. This section is available only to users with the "staff" permission enabled.
Local user accounts and groups can be created in NetBox under the "Authentication" section in the "Admin" menu.

At a minimum, each user account must have a username and password set. User accounts may also denote a first name, last name, and email address. [Permissions](../permissions.md) may also be assigned to individual users and/or groups as needed.

@@ -1,5 +1,15 @@
# GraphQL API Parameters

## GRAPHQL_DEFAULT_VERSION

!!! note "This parameter was introduced in NetBox v4.5."

Default: `1`

Designates the default version of the GraphQL API served by `/graphql/`. To access a specific version, append the version number to the URL, e.g. `/graphql/v2/`.
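
For example, to serve version 2 of the GraphQL API at `/graphql/` by default, the parameter can be set in `configuration.py` (a minimal sketch; the value shown is illustrative):

```python
# configuration.py -- illustrative override of the default GraphQL API version
GRAPHQL_DEFAULT_VERSION = 2
```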

---

## GRAPHQL_ENABLED

!!! tip "Dynamic Configuration Parameter"

@@ -127,19 +127,3 @@ The list of groups that promote an remote User to Superuser on Login. If group i

Default: `[]` (Empty list)

The list of users that get promoted to Superuser on Login. If user isn't present in list on next Login, the Role gets revoked. (Requires `REMOTE_AUTH_ENABLED` and `REMOTE_AUTH_GROUP_SYNC_ENABLED` )

---

## REMOTE_AUTH_STAFF_GROUPS

Default: `[]` (Empty list)

The list of groups that promote an remote User to Staff on Login. If group isn't present on next Login, the Role gets revoked. (Requires `REMOTE_AUTH_ENABLED` and `REMOTE_AUTH_GROUP_SYNC_ENABLED` )

---

## REMOTE_AUTH_STAFF_USERS

Default: `[]` (Empty list)

The list of users that get promoted to Staff on Login. If user isn't present in list on next Login, the Role gets revoked. (Requires `REMOTE_AUTH_ENABLED` and `REMOTE_AUTH_GROUP_SYNC_ENABLED` )
@@ -23,6 +23,31 @@ ALLOWED_HOSTS = ['*']

---

## API_TOKEN_PEPPERS

!!! info "This parameter was introduced in NetBox v4.5."

[Cryptographic peppers](https://en.wikipedia.org/wiki/Pepper_(cryptography)) are employed to generate hashes of sensitive values on the server. This parameter defines the peppers used to hash v2 API tokens in NetBox. You must define at least one pepper before creating a v2 API token. See the [API documentation](../integrations/rest-api.md#authentication) for further information about how peppers are used.

```python
API_TOKEN_PEPPERS = {
    # DO NOT USE THIS EXAMPLE PEPPER IN PRODUCTION
    1: 'kp7ht*76fiQAhUi5dHfASLlYUE_S^gI^(7J^K5M!LfoH@vl&b_',
}
```

!!! warning "Peppers are sensitive"
    Treat pepper values as extremely sensitive. Consider populating peppers from environment variables at initialization time rather than defining them in the configuration file, if feasible.

Peppers must be at least 50 characters in length and should comprise a random string with a diverse character set. Consider using the Python script at `$INSTALL_ROOT/netbox/generate_secret_key.py` to generate a pepper value.

It is recommended to start with a pepper ID of `1`. Additional peppers can be introduced later as needed to begin rotating token hashes.

!!! tip
    Although NetBox will run without `API_TOKEN_PEPPERS` defined, the use of v2 API tokens will be unavailable.

---

## DATABASE

!!! warning "Legacy Configuration Parameter"
@@ -1,16 +1,5 @@
# Security & Authentication Parameters

## ALLOW_TOKEN_RETRIEVAL

Default: `False`

!!! note
    The default value of this parameter changed from `True` to `False` in NetBox v4.3.0.

If disabled, the values of API tokens will not be displayed after each token's initial creation. A user **must** record the value of a token prior to its creation, or it will be lost. Note that this affects _all_ users, regardless of assigned permissions.

---

## ALLOWED_URL_SCHEMES

!!! tip "Dynamic Configuration Parameter"

@@ -131,17 +131,6 @@ self.log_info(f"Running as user {username} (IP: {ip_address})...")

For a complete list of available request parameters, please see the [Django documentation](https://docs.djangoproject.com/en/stable/ref/request-response/).

## Reading Data from Files

The Script class provides two convenience methods for reading data from files:

* `load_yaml`
* `load_json`

These two methods will load data in YAML or JSON format, respectively, from files within the local path (i.e. `SCRIPTS_ROOT`).

**Note:** These convenience methods are deprecated and will be removed in NetBox v4.4. These only work if running scripts within the local path, they will not work if using a storage other than ScriptFileSystemStorage.

## Logging

The Script object provides a set of convenient functions for recording messages at different severity levels:
@@ -7,7 +7,7 @@ Getting started with NetBox development is pretty straightforward, and should fe

* A Linux system or compatible environment
* A PostgreSQL server, which can be installed locally [per the documentation](../installation/1-postgresql.md)
* A Redis server, which can also be [installed locally](../installation/2-redis.md)
* Python 3.10 or later
* Python 3.12 or later

### 1. Fork the Repo

@@ -8,7 +8,7 @@ NetBox's REST API, powered by the [Django REST Framework](https://www.django-res

```no-highlight
curl -s -X POST \
-H "Authorization: Token $TOKEN" \
-H "Authorization: Bearer $TOKEN" \
-H "Content-Type: application/json" \
http://netbox/api/ipam/prefixes/ \
--data '{"prefix": "192.0.2.0/24", "site": {"name": "Branch 12"}}'

@@ -90,3 +90,10 @@ http://netbox:8000/api/extras/config-templates/123/render/ \
"bar": 123
}'
```

!!! note "Permissions"
    Rendering configuration templates via the REST API requires appropriate permissions for the relevant object type:

    * To render a device's configuration via `/api/dcim/devices/{id}/render-config/`, assign a permission for "DCIM > Device" with the `render_config` action.
    * To render a virtual machine's configuration via `/api/virtualization/virtual-machines/{id}/render-config/`, assign a permission for "Virtualization > Virtual Machine" with the `render_config` action.
    * To render a config template directly via `/api/extras/config-templates/{id}/render/`, assign a permission for "Extras > Config Template" with the `render` action.

@@ -34,9 +34,6 @@ Sets the default number of rows displayed on paginated tables.
### Paginator placement
Controls where pagination controls are rendered relative to a table.

### HTMX navigation (experimental)
Enables partial‑page navigation for supported views. Disable this preference if unexpected behavior is observed.

### Striped table rows
Toggles alternating row backgrounds on tables.

@@ -6,8 +6,8 @@ This section of the documentation discusses installing and configuring the NetBo

Begin by installing all system packages required by NetBox and its dependencies.

!!! warning "Python 3.10 or later required"
    NetBox supports Python 3.10, 3.11, and 3.12.
!!! warning "Python 3.12 or later required"
    NetBox supports only Python 3.12 or later.

```no-highlight
sudo apt install -y python3 python3-pip python3-venv python3-dev \
@@ -15,7 +15,7 @@ build-essential libxml2-dev libxslt1-dev libffi-dev libpq-dev \
libssl-dev zlib1g-dev
```

Before continuing, check that your installed Python version is at least 3.10:
Before continuing, check that your installed Python version is at least 3.12:

```no-highlight
python3 -V
@@ -120,6 +120,23 @@ If you are not yet sure what the domain name and/or IP address of the NetBox ins
ALLOWED_HOSTS = ['*']
```

### API_TOKEN_PEPPERS

Define at least one random cryptographic pepper, identified by a numeric ID starting at 1. This will be used to generate SHA256 checksums for API tokens.

```python
API_TOKEN_PEPPERS = {
    # DO NOT USE THIS EXAMPLE PEPPER IN PRODUCTION
    1: 'kp7ht*76fiQAhUi5dHfASLlYUE_S^gI^(7J^K5M!LfoH@vl&b_',
}
```

!!! tip
    As with [`SECRET_KEY`](#secret_key) below, you can use the `generate_secret_key.py` script to generate a random pepper:
    ```no-highlight
    python3 ../generate_secret_key.py
    ```

### DATABASES

This parameter holds the PostgreSQL database configuration details. The default database must be defined; additional databases may be defined as needed e.g. by plugins.

@@ -235,10 +252,10 @@ Once NetBox has been configured, we're ready to proceed with the actual installa
sudo /opt/netbox/upgrade.sh
```

Note that **Python 3.10 or later is required** for NetBox v4.0 and later releases. If the default Python installation on your server is set to a lesser version, pass the path to the supported installation as an environment variable named `PYTHON`. (Note that the environment variable must be passed _after_ the `sudo` command.)
Note that **Python 3.12 or later is required** for NetBox v4.5 and later releases. If the default Python installation on your server is set to a lesser version, pass the path to the supported installation as an environment variable named `PYTHON`. (Note that the environment variable must be passed _after_ the `sudo` command.)

```no-highlight
sudo PYTHON=/usr/bin/python3.10 /opt/netbox/upgrade.sh
sudo PYTHON=/usr/bin/python3.12 /opt/netbox/upgrade.sh
```

!!! note

@@ -60,6 +60,3 @@ You should see output similar to the following:

If the NetBox service fails to start, issue the command `journalctl -eu netbox` to check for log messages that may indicate the problem.

Once you've verified that the WSGI workers are up and running, move on to HTTP server setup.

!!! note
    There is a bug in the current stable release of gunicorn (v21.2.0) where automatic restarts of the worker processes can result in 502 errors under heavy load. (See [gunicorn bug #3038](https://github.com/benoitc/gunicorn/issues/3038) for more detail.) Users who encounter this issue may opt to downgrade to an earlier, unaffected release of gunicorn (`pip install gunicorn==20.1.0`). Note, however, that this earlier release does not officially support Python 3.11.
@@ -121,7 +121,6 @@ AUTH_LDAP_MIRROR_GROUPS = True
# Define special user types using groups. Exercise great caution when assigning superuser status.
AUTH_LDAP_USER_FLAGS_BY_GROUP = {
    "is_active": "cn=active,ou=groups,dc=example,dc=com",
    "is_staff": "cn=staff,ou=groups,dc=example,dc=com",
    "is_superuser": "cn=superuser,ou=groups,dc=example,dc=com"
}

@@ -134,7 +133,6 @@ AUTH_LDAP_CACHE_TIMEOUT = 3600
```

* `is_active` - All users must be mapped to at least this group to enable authentication. Without this, users cannot log in.
* `is_staff` - Users mapped to this group are enabled for access to the administration tools; this is the equivalent of checking the "staff status" box on a manually created user. This doesn't grant any specific permissions.
* `is_superuser` - Users mapped to this group will be granted superuser status. Superusers are implicitly granted all permissions.

!!! warning

@@ -248,7 +246,6 @@ AUTH_LDAP_MIRROR_GROUPS = True
# Define special user types using groups. Exercise great caution when assigning superuser status.
AUTH_LDAP_USER_FLAGS_BY_GROUP = {
    "is_active": "cn=active,ou=groups,dc=example,dc=com",
    "is_staff": "cn=staff,ou=groups,dc=example,dc=com",
    "is_superuser": "cn=superuser,ou=groups,dc=example,dc=com"
}

@@ -27,7 +27,7 @@ The following sections detail how to set up a new instance of NetBox:

| Dependency | Supported Versions |
|------------|--------------------|
| Python | 3.10, 3.11, 3.12 |
| Python | 3.12, 3.13, 3.14 |
| PostgreSQL | 14+ |
| Redis | 4.0+ |

@@ -19,7 +19,7 @@ NetBox requires the following dependencies:

| Dependency | Supported Versions |
|------------|--------------------|
| Python | 3.10, 3.11, 3.12 |
| Python | 3.12, 3.13, 3.14 |
| PostgreSQL | 14+ |
| Redis | 4.0+ |

@@ -27,6 +27,7 @@ NetBox requires the following dependencies:

| NetBox Version | Python min | Python max | PostgreSQL min | Redis min | Documentation |
|:--------------:|:----------:|:----------:|:--------------:|:---------:|:-----------------------------------------------------------------------------------------:|
| 4.5 | 3.12 | 3.14 | 14 | 4.0 | [Link](https://github.com/netbox-community/netbox/blob/v4.5.0/docs/installation/index.md) |
| 4.4 | 3.10 | 3.12 | 14 | 4.0 | [Link](https://github.com/netbox-community/netbox/blob/v4.4.0/docs/installation/index.md) |
| 4.3 | 3.10 | 3.12 | 14 | 4.0 | [Link](https://github.com/netbox-community/netbox/blob/v4.3.0/docs/installation/index.md) |
| 4.2 | 3.10 | 3.12 | 13 | 4.0 | [Link](https://github.com/netbox-community/netbox/blob/v4.2.0/docs/installation/index.md) |

@@ -130,7 +131,7 @@ sudo ./upgrade.sh

If the default version of Python is not at least 3.10, you'll need to pass the path to a supported Python version as an environment variable when calling the upgrade script. For example:

```no-highlight
sudo PYTHON=/usr/bin/python3.10 ./upgrade.sh
sudo PYTHON=/usr/bin/python3.12 ./upgrade.sh
```

!!! note

@@ -80,7 +80,7 @@ Likewise, the site, rack, and device objects are located under the "DCIM" applic

The full hierarchy of available endpoints can be viewed by navigating to the API root in a web browser.

Each model generally has two views associated with it: a list view and a detail view. The list view is used to retrieve a list of multiple objects and to create new objects. The detail view is used to retrieve, update, or delete an single existing object. All objects are referenced by their numeric primary key (`id`).
Each model generally has two views associated with it: a list view and a detail view. The list view is used to retrieve a list of multiple objects and to create new objects. The detail view is used to retrieve, update, or delete a single existing object. All objects are referenced by their numeric primary key (`id`).

* `/api/dcim/devices/` - List existing devices or create a new device
* `/api/dcim/devices/123/` - Retrieve, update, or delete the device with ID 123
@@ -653,18 +653,22 @@ The NetBox REST API primarily employs token-based authentication. For convenienc

### Tokens

A token is a unique identifier mapped to a NetBox user account. Each user may have one or more tokens which he or she can use for authentication when making REST API requests. To create a token, navigate to the API tokens page under your user profile.
A token is a secret, unique identifier mapped to a NetBox user account. Each user may have one or more tokens which he or she can use for authentication when making REST API requests. To create a token, navigate to the API tokens page under your user profile. When creating a token, NetBox will automatically populate a randomly-generated token value.

!!! note "Tokens cannot be retrieved once created"
    Once a token has been created, its plaintext value cannot be retrieved. For this reason, you must take care to securely record the token locally immediately upon its creation. If a token plaintext is lost, it cannot be recovered: A new token must be created.

By default, all users can create and manage their own REST API tokens under the user control panel in the UI or via the REST API. This ability can be disabled by overriding the [`DEFAULT_PERMISSIONS`](../configuration/security.md#default_permissions) configuration parameter.

Each token contains a 160-bit key represented as 40 hexadecimal characters. When creating a token, you'll typically leave the key field blank so that a random key will be automatically generated. However, NetBox allows you to specify a key in case you need to restore a previously deleted token to operation.

Additionally, a token can be set to expire at a specific time. This can be useful if an external client needs to be granted temporary access to NetBox.

!!! info "Restricting Token Retrieval"
    The ability to retrieve the key value of a previously-created API token can be restricted by disabling the [`ALLOW_TOKEN_RETRIEVAL`](../configuration/security.md#allow_token_retrieval) configuration parameter.

#### v1 and v2 Tokens

### Restricting Write Operations

Beginning with NetBox v4.5, two versions of API token are supported, denoted as v1 and v2. Users are strongly encouraged to create only v2 tokens and to discontinue the use of v1 tokens. Support for v1 tokens will be removed in a future NetBox release.

v2 API tokens offer much stronger security. The token plaintext given at creation time is hashed together with a configured [cryptographic pepper](../configuration/required-parameters.md#api_token_peppers) to generate a unique checksum. This checksum is irreversible; the token plaintext is never stored on the server and thus cannot be retrieved even with database-level access.
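
The exact hashing scheme is internal to NetBox, but the general idea can be sketched as follows. This is illustrative only: the function name and the way the pepper and plaintext are combined are assumptions, not NetBox's actual implementation; the only details taken from the documentation are that a SHA256 checksum is derived from the token plaintext and a server-side pepper.

```python
import hashlib


def v2_token_checksum(plaintext: str, pepper: str) -> str:
    """Illustrative sketch: derive an irreversible checksum from the token plaintext
    and a server-side pepper, so only the checksum (plus the pepper ID) is stored."""
    return hashlib.sha256(f'{pepper}{plaintext}'.encode()).hexdigest()


# The plaintext is shown to the user once at creation time and never persisted;
# verification recomputes the checksum from the presented token and the configured pepper.
```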

#### Restricting Write Operations

By default, a token can be used to perform all actions via the API that a user would be permitted to do via the web UI. Deselecting the "write enabled" option will restrict API requests made with the token to read operations (e.g. GET) only.

@@ -681,10 +685,22 @@ It is possible to provision authentication tokens for other users via the REST A

### Authenticating to the API

An authentication token is attached to a request by setting the `Authorization` header to the string `Token` followed by a space and the user's token:
An authentication token is included with a request in its `Authorization` header. The format of the header value depends on the version of token in use. v2 tokens use the following form, concatenating the token's prefix (`nbt_`) and key with its plaintext value, separated by a period:

```
$ curl -H "Authorization: Token $TOKEN" \
Authorization: Bearer nbt_<key>.<token>
```

Legacy v1 tokens use the prefix `Token` rather than `Bearer`, and include only the token plaintext. (v1 tokens do not have a key.)

```
Authorization: Token <token>
```

Below is an example REST API request utilizing a v2 token.

```
$ curl -H "Authorization: Bearer nbt_4F9DAouzURLb.zjebxBPzICiPbWz0Wtx0fTL7bCKXKGTYhNzkgC2S" \
-H "Accept: application/json; indent=4" \
https://netbox/api/dcim/sites/
{

@@ -1,6 +1,6 @@
# Filters & Filter Sets

Filter sets define the mechanisms available for filtering or searching through a set of objects in NetBox. For instance, sites can be filtered by their parent region or group, status, facility ID, and so on. The same filter set is used consistently for a model whether the request is made via the UI or REST API. (Note that the GraphQL API uses a separate filter class.) NetBox employs the [django-filters2](https://django-tables2.readthedocs.io/en/latest/) library to define filter sets.
Filter sets define the mechanisms available for filtering or searching through a set of objects in NetBox. For instance, sites can be filtered by their parent region or group, status, facility ID, and so on. The same filter set is used consistently for a model whether the request is made via the UI or REST API. (Note that the GraphQL API uses a separate filter class.) NetBox employs the [django-filter](https://django-filter.readthedocs.io/en/stable/) library to define filter sets.
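
A minimal django-filter sketch of the pattern (the filter and field choices here are illustrative; NetBox's real filter sets subclass its own `NetBoxModelFilterSet` base class and define many more filters):

```python
import django_filters

from dcim.models import Site  # illustrative model choice


class SiteFilterSet(django_filters.FilterSet):
    """Illustrative only: filter sites by a free-form search term or by status."""
    q = django_filters.CharFilter(method='search', label='Search')
    status = django_filters.CharFilter(lookup_expr='iexact')

    class Meta:
        model = Site
        fields = ('name', 'status', 'facility')

    def search(self, queryset, name, value):
        # Match the search term against the site name
        return queryset.filter(name__icontains=value)
```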

## FilterSet Classes

@@ -173,12 +173,12 @@ classifiers=[
    'Intended Audience :: Developers',
    'Natural Language :: English',
    "Programming Language :: Python :: 3 :: Only",
    'Programming Language :: Python :: 3.10',
    'Programming Language :: Python :: 3.11',
    'Programming Language :: Python :: 3.12',
    'Programming Language :: Python :: 3.13',
    'Programming Language :: Python :: 3.14',
]

requires-python = ">=3.10.0"
requires-python = ">=3.12.0"

```

@@ -195,7 +195,7 @@ python3 -m venv ~/.virtualenvs/my_plugin

You can make NetBox available within this environment by creating a path file pointing to its location. This will add NetBox to the Python path upon activation. (Be sure to adjust the command below to specify your actual virtual environment path, Python version, and NetBox installation.)

```shell
echo /opt/netbox/netbox > $VENV/lib/python3.10/site-packages/netbox.pth
echo /opt/netbox/netbox > $VENV/lib/python3.12/site-packages/netbox.pth
```

## Development Installation

@@ -64,14 +64,17 @@ item1 = PluginMenuItem(

A `PluginMenuItem` has the following attributes:

| Attribute | Required | Description |
|-----------------|----------|----------------------------------------------------------------------------------------------------------|
| `link` | Yes | Name of the URL path to which this menu item links |
| `link_text` | Yes | The text presented to the user |
| `permissions` | - | A list of permissions required to display this link |
| `auth_required` | - | Display only for authenticated users |
| `staff_only` | - | Display only for users who have `is_staff` set to true (any specified permissions will also be required) |
| `buttons` | - | An iterable of PluginMenuButton instances to include |
| Attribute | Required | Description |
|-----------------|----------|------------------------------------------------------|
| `link` | Yes | Name of the URL path to which this menu item links |
| `link_text` | Yes | The text presented to the user |
| `permissions` | - | A list of permissions required to display this link |
| `auth_required` | - | Display only for authenticated users |
| `staff_only` | - | Display only for superusers |
| `buttons` | - | An iterable of PluginMenuButton instances to include |

!!! note "Changed in NetBox v4.5"
    In releases prior to NetBox v4.5, `staff_only` restricted display of a menu item to only users with `is_staff` set to True. In NetBox v4.5, the `is_staff` flag was removed from the user model. Menu items with `staff_only` set to True are now displayed only for superusers.
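
A sketch of a menu item using these attributes follows. The URL pattern names and permission string are hypothetical, and the import path assumes the current `netbox.plugins` module location:

```python
from netbox.plugins import PluginMenuButton, PluginMenuItem

menu_items = (
    PluginMenuItem(
        link='plugins:example_plugin:widget_list',       # hypothetical URL pattern name
        link_text='Widgets',
        permissions=['example_plugin.view_widget'],      # hypothetical permission
        auth_required=True,
        buttons=(
            PluginMenuButton(
                link='plugins:example_plugin:widget_add',  # hypothetical URL pattern name
                title='Add',
                icon_class='mdi mdi-plus-thick',
            ),
        ),
    ),
)
```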

## Menu Buttons

@@ -1,5 +1,44 @@
# NetBox v4.4

## v4.4.4 (2025-10-15)

### Bug Fixes

* [#20554](https://github.com/netbox-community/netbox/issues/20554) - Fix generic relation filters to accept `<app>.<model>` format matching POST requests
* [#20574](https://github.com/netbox-community/netbox/issues/20574) - Fix excessive storage initialization overhead when listing scripts with remote backends
* [#20584](https://github.com/netbox-community/netbox/issues/20584) - Enforce PoE mode requirement on interface templates when PoE type is set
* [#20585](https://github.com/netbox-community/netbox/issues/20585) - Fix API schema generation crash for models with single-field UniqueConstraints
* [#20587](https://github.com/netbox-community/netbox/issues/20587) - Fix upgrade.sh failure when removing stale content types

---

## v4.4.3 (2025-10-14)

### Enhancements

* [#20426](https://github.com/netbox-community/netbox/issues/20426) - Add a copy-to-clipboard button for custom script output
* [#20516](https://github.com/netbox-community/netbox/issues/20516) - Improve rendering of VLAN ID ranges in VLAN group tables

### Bug Fixes

* [#19302](https://github.com/netbox-community/netbox/issues/19302) - Fix uniqueness validation in REST API for nullable fields
* [#19615](https://github.com/netbox-community/netbox/issues/19615) - Fix support for static file parameters in templates when external storage is in use
* [#19818](https://github.com/netbox-community/netbox/issues/19818) - Hide primary IP assignment fields when creating a new virtual machine in the UI
* [#19825](https://github.com/netbox-community/netbox/issues/19825) - Prevent cache for config revisions from being erroneously overwritten when debugging is enabled
* [#20140](https://github.com/netbox-community/netbox/issues/20140) - Changing a site's region or group should update any associated circuit terminations
* [#20156](https://github.com/netbox-community/netbox/issues/20156) - Fix display of rack elevation labels
* [#20290](https://github.com/netbox-community/netbox/issues/20290) - Fix migration error when upgrading to NetBox v4.4 from releases earlier than v4.3
* [#20471](https://github.com/netbox-community/netbox/issues/20471) - Saving an unmodified VLAN group should not generate a change record
* [#20475](https://github.com/netbox-community/netbox/issues/20475) - Collapse singleton VLAN IDs in VLAN group display
* [#20494](https://github.com/netbox-community/netbox/issues/20494) - Correct OpenAPI schema definition for `IntegerRangeSerializer`
* [#20496](https://github.com/netbox-community/netbox/issues/20496) - REST API should always honor `MAX_PAGE_SIZE` value
* [#20497](https://github.com/netbox-community/netbox/issues/20497) - Fix filtering of VLAN groups by VLAN ID range in GraphQL API
* [#20507](https://github.com/netbox-community/netbox/issues/20507) - Fix support for fetching ASN contacts via GraphQL API
* [#20523](https://github.com/netbox-community/netbox/issues/20523) - Hide password change form for users authenticated via SSO
* [#20542](https://github.com/netbox-community/netbox/issues/20542) - Fix the creation of MAC addresses using the "quick add" form

---

## v4.4.2 (2025-09-30)

### Enhancements
@@ -1,57 +0,0 @@
from django.utils.translation import gettext as _

from account.models import UserToken
from netbox.tables import NetBoxTable, columns

__all__ = (
    'UserTokenTable',
)


TOKEN = """<samp><span id="token_{{ record.pk }}">{{ record }}</span></samp>"""

ALLOWED_IPS = """{{ value|join:", " }}"""

COPY_BUTTON = """
{% if settings.ALLOW_TOKEN_RETRIEVAL %}
  {% copy_content record.pk prefix="token_" color="success" %}
{% endif %}
"""


class UserTokenTable(NetBoxTable):
    """
    Table for users to manager their own API tokens under account views.
    """
    key = columns.TemplateColumn(
        verbose_name=_('Key'),
        template_code=TOKEN,
    )
    write_enabled = columns.BooleanColumn(
        verbose_name=_('Write Enabled')
    )
    created = columns.DateTimeColumn(
        timespec='minutes',
        verbose_name=_('Created'),
    )
    expires = columns.DateTimeColumn(
        timespec='minutes',
        verbose_name=_('Expires'),
    )
    last_used = columns.DateTimeColumn(
        verbose_name=_('Last Used'),
    )
    allowed_ips = columns.TemplateColumn(
        verbose_name=_('Allowed IPs'),
        template_code=ALLOWED_IPS
    )
    actions = columns.ActionsColumn(
        actions=('edit', 'delete'),
        extra_buttons=COPY_BUTTON
    )

    class Meta(NetBoxTable.Meta):
        model = UserToken
        fields = (
            'pk', 'id', 'key', 'description', 'write_enabled', 'created', 'expires', 'last_used', 'allowed_ips',
        )
@@ -26,8 +26,9 @@ from extras.tables import BookmarkTable, NotificationTable, SubscriptionTable
from netbox.authentication import get_auth_backend_display, get_saml_idps
from netbox.config import get_config
from netbox.views import generic
from users import forms, tables
from users import forms
from users.models import UserConfig
from users.tables import TokenTable
from utilities.request import safe_for_redirect
from utilities.string import remove_linebreaks
from utilities.views import register_model_view
@@ -328,7 +329,8 @@ class UserTokenListView(LoginRequiredMixin, View):

    def get(self, request):
        tokens = UserToken.objects.filter(user=request.user)
        table = tables.UserTokenTable(tokens)
        table = TokenTable(tokens)
        table.columns.hide('user')
        table.configure(request)

        return render(request, 'account/token_list.html', {
@@ -343,11 +345,9 @@ class UserTokenView(LoginRequiredMixin, View):

    def get(self, request, pk):
        token = get_object_or_404(UserToken.objects.filter(user=request.user), pk=pk)
        key = token.key if settings.ALLOW_TOKEN_RETRIEVAL else None

        return render(request, 'account/token.html', {
            'object': token,
            'key': key,
        })
@@ -1,5 +1,7 @@
from django.apps import AppConfig

from netbox import denormalized


class CircuitsConfig(AppConfig):
    name = "circuits"
@@ -8,6 +10,16 @@ class CircuitsConfig(AppConfig):
    def ready(self):
        from netbox.models.features import register_models
        from . import signals, search  # noqa: F401
        from .models import CircuitTermination

        # Register models
        register_models(*self.get_models())

        denormalized.register(CircuitTermination, '_site', {
            '_region': 'region',
            '_site_group': 'group',
        })

        denormalized.register(CircuitTermination, '_location', {
            '_site': 'site',
        })
97 netbox/circuits/migrations/0053_ci_collations.py Normal file

@@ -0,0 +1,97 @@
from django.db import migrations, models

PATTERN_OPS_INDEXES = [
    'circuits_circuitgroup_name_ec8ac1e5_like',
    'circuits_circuitgroup_slug_61ca866b_like',
    'circuits_circuittype_name_8256ea9a_like',
    'circuits_circuittype_slug_9b4b3cf9_like',
    'circuits_provider_name_8f2514f5_like',
    'circuits_provider_slug_c3c0aa10_like',
    'circuits_virtualcircuittype_name_5184db16_like',
    'circuits_virtualcircuittype_slug_75d5c661_like',
]


def remove_indexes(apps, schema_editor):
    for idx in PATTERN_OPS_INDEXES:
        schema_editor.execute(f'DROP INDEX IF EXISTS {idx}')


class Migration(migrations.Migration):

    dependencies = [
        ('circuits', '0052_extend_circuit_abs_distance_upper_limit'),
        ('dcim', '0217_ci_collations'),
    ]

    operations = [
        migrations.RunPython(
            code=remove_indexes,
            reverse_code=migrations.RunPython.noop,
        ),
        migrations.AlterField(
            model_name='circuit',
            name='cid',
            field=models.CharField(db_collation='case_insensitive', max_length=100),
        ),
        migrations.AlterField(
            model_name='circuitgroup',
            name='name',
            field=models.CharField(db_collation='ci_natural_sort', max_length=100, unique=True),
        ),
        migrations.AlterField(
            model_name='circuitgroup',
            name='slug',
            field=models.SlugField(db_collation='case_insensitive', max_length=100, unique=True),
        ),
        migrations.AlterField(
            model_name='circuittype',
            name='name',
            field=models.CharField(db_collation='ci_natural_sort', max_length=100, unique=True),
        ),
        migrations.AlterField(
            model_name='circuittype',
            name='slug',
            field=models.SlugField(db_collation='case_insensitive', max_length=100, unique=True),
        ),
        migrations.AlterField(
            model_name='provider',
            name='name',
            field=models.CharField(db_collation='ci_natural_sort', max_length=100, unique=True),
        ),
        migrations.AlterField(
            model_name='provider',
            name='slug',
            field=models.SlugField(db_collation='case_insensitive', max_length=100, unique=True),
        ),
        migrations.AlterField(
            model_name='provideraccount',
            name='account',
            field=models.CharField(db_collation='ci_natural_sort', max_length=100),
        ),
        migrations.AlterField(
            model_name='provideraccount',
            name='name',
            field=models.CharField(blank=True, db_collation='ci_natural_sort', max_length=100),
        ),
        migrations.AlterField(
            model_name='providernetwork',
            name='name',
            field=models.CharField(db_collation='ci_natural_sort', max_length=100),
        ),
        migrations.AlterField(
            model_name='virtualcircuit',
            name='cid',
            field=models.CharField(db_collation='case_insensitive', max_length=100),
        ),
        migrations.AlterField(
            model_name='virtualcircuittype',
            name='name',
            field=models.CharField(db_collation='ci_natural_sort', max_length=100, unique=True),
        ),
        migrations.AlterField(
            model_name='virtualcircuittype',
            name='slug',
            field=models.SlugField(db_collation='case_insensitive', max_length=100, unique=True),
        ),
    ]
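
The `case_insensitive` and `ci_natural_sort` collations referenced above are not defined in this migration; they are assumed to be provisioned by the `dcim` migration listed in `dependencies` (`0217_ci_collations`). As a rough illustration of how such a collation can be created, Django's `CreateCollation` operation supports nondeterministic ICU collations:

```python
# Illustrative only -- not the actual dcim migration
from django.contrib.postgres.operations import CreateCollation
from django.db import migrations


class Migration(migrations.Migration):

    dependencies = []

    operations = [
        # A nondeterministic ICU collation that compares strings case-insensitively
        CreateCollation(
            'case_insensitive',
            provider='icu',
            locale='und-u-ks-level2',
            deterministic=False,
        ),
    ]
```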

@@ -41,9 +41,10 @@ class Circuit(ContactsMixin, ImageAttachmentsMixin, DistanceMixin, PrimaryModel)
    ProviderAccount. Circuit port speed and commit rate are measured in Kbps.
    """
    cid = models.CharField(
        max_length=100,
        verbose_name=_('circuit ID'),
        help_text=_('Unique circuit ID')
        max_length=100,
        db_collation='case_insensitive',
        help_text=_('Unique circuit ID'),
    )
    provider = models.ForeignKey(
        to='circuits.Provider',

@@ -21,13 +21,14 @@ class Provider(ContactsMixin, PrimaryModel):
        verbose_name=_('name'),
        max_length=100,
        unique=True,
        db_collation='ci_natural_sort',
        help_text=_('Full name of the provider'),
        db_collation="natural_sort"
    )
    slug = models.SlugField(
        verbose_name=_('slug'),
        max_length=100,
        unique=True
        unique=True,
        db_collation='case_insensitive',
    )
    asns = models.ManyToManyField(
        to='ipam.ASN',
@@ -56,13 +57,15 @@ class ProviderAccount(ContactsMixin, PrimaryModel):
        related_name='accounts'
    )
    account = models.CharField(
        verbose_name=_('account ID'),
        max_length=100,
        verbose_name=_('account ID')
        db_collation='ci_natural_sort',
    )
    name = models.CharField(
        verbose_name=_('name'),
        max_length=100,
        blank=True
        db_collation='ci_natural_sort',
        blank=True,
    )

    clone_fields = ('provider', )
@@ -97,7 +100,7 @@ class ProviderNetwork(PrimaryModel):
    name = models.CharField(
        verbose_name=_('name'),
        max_length=100,
        db_collation="natural_sort"
        db_collation='ci_natural_sort',
    )
    provider = models.ForeignKey(
        to='circuits.Provider',

@@ -34,9 +34,10 @@ class VirtualCircuit(PrimaryModel):
    A virtual connection between two or more endpoints, delivered across one or more physical circuits.
    """
    cid = models.CharField(
        max_length=100,
        verbose_name=_('circuit ID'),
        help_text=_('Unique circuit ID')
        max_length=100,
        db_collation='case_insensitive',
        help_text=_('Unique circuit ID'),
    )
    provider_network = models.ForeignKey(
        to='circuits.ProviderNetwork',
@@ -282,18 +282,18 @@ class FixSerializedPKRelatedField(OpenApiSerializerFieldExtension):

class FixIntegerRangeSerializerSchema(OpenApiSerializerExtension):
    target_class = 'netbox.api.fields.IntegerRangeSerializer'
    match_subclasses = True

    def map_serializer(self, auto_schema: 'AutoSchema', direction: Direction) -> _SchemaType:
        # One range = two integers; many=True will wrap this in an outer array
        return {
            'type': 'array',
            'items': {
                'type': 'array',
                'items': {
                    'type': 'integer',
                },
                'minItems': 2,
                'maxItems': 2,
                'type': 'integer',
            },
            'minItems': 2,
            'maxItems': 2,
            'example': [10, 20],
        }


@@ -9,7 +9,6 @@ from drf_spectacular.utils import OpenApiParameter, extend_schema
from rest_framework import viewsets
from rest_framework.decorators import action
from rest_framework.exceptions import PermissionDenied
from rest_framework.permissions import IsAdminUser
from rest_framework.response import Response
from rest_framework.routers import APIRootView
from rest_framework.viewsets import ReadOnlyModelViewSet
@@ -24,7 +23,7 @@ from netbox.api.authentication import IsAuthenticatedOrLoginNotRequired
from netbox.api.metadata import ContentTypeMetadata
from netbox.api.pagination import LimitOffsetListPagination
from netbox.api.viewsets import NetBoxModelViewSet, NetBoxReadOnlyModelViewSet

from utilities.api import IsSuperuser
from . import serializers


@@ -100,7 +99,7 @@ class BaseRQViewSet(viewsets.ViewSet):
    """
    Base class for RQ view sets. Provides a list() method. Subclasses must implement get_data().
    """
    permission_classes = [IsAdminUser]
    permission_classes = [IsSuperuser]
    serializer_class = None

    def get_data(self):
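
Here DRF's built-in `IsAdminUser` (which checks `is_staff`) is replaced by a project-level `IsSuperuser` permission class imported from `utilities.api`. That class's implementation is not shown in this excerpt; a DRF permission with the behavior its name suggests would look roughly like the following sketch (an assumption, not the actual NetBox code):

```python
from rest_framework.permissions import BasePermission


class IsSuperuser(BasePermission):
    """Allow access only to authenticated superusers (illustrative sketch)."""

    def has_permission(self, request, view):
        return bool(request.user and request.user.is_authenticated and request.user.is_superuser)
```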

@@ -80,6 +80,7 @@ class JobFilterSet(BaseFilterSet):
        method='search',
        label=_('Search'),
    )
    object_type = ContentTypeFilter()
    created = django_filters.DateTimeFilter()
    created__before = django_filters.DateTimeFilter(
        field_name='created',
@@ -169,6 +170,7 @@ class ObjectChangeFilterSet(BaseFilterSet):
    changed_object_type_id = django_filters.ModelMultipleChoiceFilter(
        queryset=ContentType.objects.all()
    )
    related_object_type = ContentTypeFilter()
    user_id = django_filters.ModelMultipleChoiceFilter(
        queryset=User.objects.all(),
        label=_('User (ID)'),

@@ -3,12 +3,12 @@ from typing import Annotated, List, TYPE_CHECKING
import strawberry
import strawberry_django
from django.contrib.contenttypes.models import ContentType
from strawberry.types import Info

from core.models import ObjectChange

if TYPE_CHECKING:
    from core.graphql.types import DataFileType, DataSourceType
    from netbox.core.graphql.types import ObjectChangeType
    from core.graphql.types import DataFileType, DataSourceType, ObjectChangeType

__all__ = (
    'ChangelogMixin',
@@ -20,7 +20,7 @@ __all__ = (
class ChangelogMixin:

    @strawberry_django.field
    def changelog(self, info) -> List[Annotated["ObjectChangeType", strawberry.lazy('.types')]]:  # noqa: F821
    def changelog(self, info: Info) -> List[Annotated['ObjectChangeType', strawberry.lazy('.types')]]:  # noqa: F821
        content_type = ContentType.objects.get_for_model(self)
        object_changes = ObjectChange.objects.filter(
            changed_object_type=content_type,
@@ -31,5 +31,5 @@ class ChangelogMixin:

@strawberry.type
class SyncedDataMixin:
    data_source: Annotated["DataSourceType", strawberry.lazy('core.graphql.types')] | None
    data_file: Annotated["DataFileType", strawberry.lazy('core.graphql.types')] | None
    data_source: Annotated['DataSourceType', strawberry.lazy('core.graphql.types')] | None
    data_file: Annotated['DataFileType', strawberry.lazy('core.graphql.types')] | None
48 netbox/core/migrations/0019_configrevision_active.py Normal file

@@ -0,0 +1,48 @@
# Generated by Django 5.2.5 on 2025-09-09 16:48

from django.db import migrations, models


def get_active(apps, schema_editor):
    from django.core.cache import cache
    ConfigRevision = apps.get_model('core', 'ConfigRevision')
    version = None
    revision = None

    # Try and get the latest version from cache
    try:
        version = cache.get('config_version')
    except Exception:
        pass

    # If there is a version in cache, attempt to set revision to the current version from cache.
    # If the version in cache does not exist or there is no version, try the latest revision in the database.
    if not version or (version and not (revision := ConfigRevision.objects.filter(pk=version).first())):
        revision = ConfigRevision.objects.order_by('-created').first()

    # If there is a revision set, set the active revision
    if revision:
        revision.active = True
        revision.save()


class Migration(migrations.Migration):

    dependencies = [
        ('core', '0018_concrete_objecttype'),
    ]

    operations = [
        migrations.AddField(
            model_name='configrevision',
            name='active',
            field=models.BooleanField(default=False),
        ),
        migrations.RunPython(code=get_active, reverse_code=migrations.RunPython.noop),
        migrations.AddConstraint(
            model_name='configrevision',
            constraint=models.UniqueConstraint(
                condition=models.Q(('active', True)), fields=('active',), name='unique_active_config_revision'
            ),
        ),
    ]
30 netbox/core/migrations/0020_ci_collations.py Normal file

@@ -0,0 +1,30 @@
from django.db import migrations, models

PATTERN_OPS_INDEXES = [
    'core_datasource_name_17788499_like',
]


def remove_indexes(apps, schema_editor):
    for idx in PATTERN_OPS_INDEXES:
        schema_editor.execute(f'DROP INDEX IF EXISTS {idx}')


class Migration(migrations.Migration):

    dependencies = [
        ('core', '0019_configrevision_active'),
        ('dcim', '0217_ci_collations'),
    ]

    operations = [
        migrations.RunPython(
            code=remove_indexes,
            reverse_code=migrations.RunPython.noop,
        ),
        migrations.AlterField(
            model_name='datasource',
            name='name',
            field=models.CharField(db_collation='ci_natural_sort', max_length=100, unique=True),
        ),
    ]
@@ -14,6 +14,9 @@ class ConfigRevision(models.Model):
    """
    An atomic revision of NetBox's configuration.
    """
    active = models.BooleanField(
        default=False
    )
    created = models.DateTimeField(
        verbose_name=_('created'),
        auto_now_add=True
@@ -35,6 +38,13 @@ class ConfigRevision(models.Model):
        ordering = ['-created']
        verbose_name = _('config revision')
        verbose_name_plural = _('config revisions')
        constraints = [
            models.UniqueConstraint(
                fields=('active',),
                condition=models.Q(active=True),
                name='unique_active_config_revision',
            )
        ]

    def __str__(self):
        if not self.pk:
@@ -59,8 +69,13 @@ class ConfigRevision(models.Model):
        """
        cache.set('config', self.data, None)
        cache.set('config_version', self.pk, None)

        # Set all instances of ConfigRevision to false and set this instance to true
        ConfigRevision.objects.all().update(active=False)
        ConfigRevision.objects.filter(pk=self.pk).update(active=True)

    activate.alters_data = True

    @property
    def is_active(self):
        return cache.get('config_version') == self.pk
        return self.active

@@ -1,3 +0,0 @@
# TODO: Remove this module in NetBox v4.5
# Provided for backward compatibility
from .object_types import *

@@ -38,7 +38,8 @@ class DataSource(JobsMixin, PrimaryModel):
    name = models.CharField(
        verbose_name=_('name'),
        max_length=100,
        unique=True
        unique=True,
        db_collation='ci_natural_sort',
    )
    type = models.CharField(
        verbose_name=_('type'),

@@ -5,7 +5,7 @@ from django.contrib.contenttypes.models import ContentType
from django.contrib.postgres.fields import ArrayField
from django.contrib.postgres.indexes import GinIndex
from django.core.exceptions import ObjectDoesNotExist
from django.db import models
from django.db import connection, models
from django.db.models import Q
from django.utils.translation import gettext as _

@@ -66,6 +66,14 @@ class ObjectTypeManager(models.Manager):
        """
        from netbox.models.features import get_model_features, model_is_public

        # TODO: Remove this in NetBox v5.0
        # If the ObjectType table has not yet been provisioned (e.g. because we're in a pre-v4.4 migration),
        # fall back to ContentType.
        if 'core_objecttype' not in connection.introspection.table_names():
            ct = ContentType.objects.get_for_model(model, for_concrete_model=for_concrete_model)
            ct.features = get_model_features(ct.model_class())
            return ct

        if not inspect.isclass(model):
            model = model.__class__
        opts = self._get_opts(model, for_concrete_model)
@@ -8,6 +8,7 @@ from rq.job import Job as RQ_Job, JobStatus
from rq.registry import FailedJobRegistry, StartedJobRegistry

from rest_framework import status
from users.constants import TOKEN_PREFIX
from users.models import Token, User
from utilities.testing import APITestCase, APIViewTestCases, TestCase
from utilities.testing.utils import disable_logging
@@ -107,14 +108,14 @@ class ObjectTypeTest(APITestCase):
    def test_list_objects(self):
        object_type_count = ObjectType.objects.count()

        response = self.client.get(reverse('extras-api:objecttype-list'), **self.header)
        response = self.client.get(reverse('core-api:objecttype-list'), **self.header)
        self.assertHttpStatus(response, status.HTTP_200_OK)
        self.assertEqual(response.data['count'], object_type_count)

    def test_get_object(self):
        object_type = ObjectType.objects.first()

        url = reverse('extras-api:objecttype-detail', kwargs={'pk': object_type.pk})
        url = reverse('core-api:objecttype-detail', kwargs={'pk': object_type.pk})
        self.assertHttpStatus(self.client.get(url, **self.header), status.HTTP_200_OK)

@@ -134,12 +135,9 @@ class BackgroundTaskTestCase(TestCase):
        Create a user and token for API calls.
        """
        # Create the test user and assign permissions
        self.user = User.objects.create_user(username='testuser')
        self.user.is_staff = True
        self.user.is_active = True
        self.user.save()
        self.user = User.objects.create_user(username='testuser', is_active=True)
        self.token = Token.objects.create(user=self.user)
        self.header = {'HTTP_AUTHORIZATION': f'Token {self.token.key}'}
        self.header = {'HTTP_AUTHORIZATION': f'Bearer {TOKEN_PREFIX}{self.token.key}.{self.token.token}'}

        # Clear all queues prior to running each test
        get_queue('default').connection.flushall()
@@ -150,13 +148,11 @@ class BackgroundTaskTestCase(TestCase):
        url = reverse('core-api:rqqueue-list')

        # Attempt to load view without permission
        self.user.is_staff = False
        self.user.save()
        response = self.client.get(url, **self.header)
        self.assertEqual(response.status_code, 403)

        # Load view with permission
        self.user.is_staff = True
        self.user.is_superuser = True
        self.user.save()
        response = self.client.get(url, **self.header)
        self.assertEqual(response.status_code, 200)
@@ -165,7 +161,16 @@ class BackgroundTaskTestCase(TestCase):
        self.assertIn('low', str(response.content))

    def test_background_queue(self):
        response = self.client.get(reverse('core-api:rqqueue-detail', args=['default']), **self.header)
        url = reverse('core-api:rqqueue-detail', args=['default'])

        # Attempt to load view without permission
        response = self.client.get(url, **self.header)
        self.assertEqual(response.status_code, 403)

        # Load view with permission
        self.user.is_superuser = True
        self.user.save()
        response = self.client.get(url, **self.header)
        self.assertEqual(response.status_code, 200)
        self.assertIn('default', str(response.content))
        self.assertIn('oldest_job_timestamp', str(response.content))
@@ -174,8 +179,16 @@ class BackgroundTaskTestCase(TestCase):
    def test_background_task_list(self):
        queue = get_queue('default')
        queue.enqueue(self.dummy_job_default)
        url = reverse('core-api:rqtask-list')

        response = self.client.get(reverse('core-api:rqtask-list'), **self.header)
        # Attempt to load view without permission
        response = self.client.get(url, **self.header)
        self.assertEqual(response.status_code, 403)

        # Load view with permission
        self.user.is_superuser = True
        self.user.save()
        response = self.client.get(url, **self.header)
        self.assertEqual(response.status_code, 200)
        self.assertIn('origin', str(response.content))
        self.assertIn('core.tests.test_api.BackgroundTaskTestCase.dummy_job_default()', str(response.content))
@@ -183,8 +196,16 @@ class BackgroundTaskTestCase(TestCase):
    def test_background_task(self):
        queue = get_queue('default')
        job = queue.enqueue(self.dummy_job_default)
        url = reverse('core-api:rqtask-detail', args=[job.id])

        response = self.client.get(reverse('core-api:rqtask-detail', args=[job.id]), **self.header)
        # Attempt to load view without permission
        response = self.client.get(url, **self.header)
        self.assertEqual(response.status_code, 403)

        # Load view with permission
        self.user.is_superuser = True
        self.user.save()
        response = self.client.get(url, **self.header)
        self.assertEqual(response.status_code, 200)
        self.assertIn(str(job.id), str(response.content))
        self.assertIn('origin', str(response.content))
@@ -194,45 +215,65 @@ class BackgroundTaskTestCase(TestCase):
    def test_background_task_delete(self):
        queue = get_queue('default')
        job = queue.enqueue(self.dummy_job_default)
        url = reverse('core-api:rqtask-delete', args=[job.id])

        response = self.client.post(reverse('core-api:rqtask-delete', args=[job.id]), **self.header)
        # Attempt to load view without permission
        response = self.client.get(url, **self.header)
        self.assertEqual(response.status_code, 403)

        # Load view with permission
        self.user.is_superuser = True
        self.user.save()
        response = self.client.post(url, **self.header)
        self.assertEqual(response.status_code, 200)
        self.assertFalse(RQ_Job.exists(job.id, connection=queue.connection))
        queue = get_queue('default')
        self.assertNotIn(job.id, queue.job_ids)

    def test_background_task_requeue(self):
        queue = get_queue('default')

        # Enqueue & run a job that will fail
        queue = get_queue('default')
        job = queue.enqueue(self.dummy_job_failing)
        worker = get_worker('default')
        with disable_logging():
            worker.work(burst=True)
        self.assertTrue(job.is_failed)
        url = reverse('core-api:rqtask-requeue', args=[job.id])

        # Attempt to requeue the job without permission
        response = self.client.post(url, **self.header)
        self.assertEqual(response.status_code, 403)

        # Re-enqueue the failed job and check that its status has been reset
        response = self.client.post(reverse('core-api:rqtask-requeue', args=[job.id]), **self.header)
        self.user.is_superuser = True
        self.user.save()
        response = self.client.post(url, **self.header)
        self.assertEqual(response.status_code, 200)
        job = RQ_Job.fetch(job.id, queue.connection)
        self.assertFalse(job.is_failed)

    def test_background_task_enqueue(self):
        queue = get_queue('default')

        # Enqueue some jobs that each depends on its predecessor
        queue = get_queue('default')
        job = previous_job = None
        for _ in range(0, 3):
            job = queue.enqueue(self.dummy_job_default, depends_on=previous_job)
            previous_job = job
        url = reverse('core-api:rqtask-enqueue', args=[job.id])

        # Check that the last job to be enqueued has a status of deferred
        self.assertIsNotNone(job)
        self.assertEqual(job.get_status(), JobStatus.DEFERRED)
|
||||
self.assertIsNone(job.enqueued_at)
|
||||
|
||||
# Attempt to force-enqueue the job without permission
|
||||
response = self.client.post(url, **self.header)
|
||||
self.assertEqual(response.status_code, 403)
|
||||
|
||||
# Force-enqueue the deferred job
|
||||
response = self.client.post(reverse('core-api:rqtask-enqueue', args=[job.id]), **self.header)
|
||||
self.user.is_superuser = True
|
||||
self.user.save()
|
||||
response = self.client.post(url, **self.header)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
|
||||
# Check that job's status is updated correctly
|
||||
@@ -242,19 +283,27 @@ class BackgroundTaskTestCase(TestCase):
|
||||
|
||||
def test_background_task_stop(self):
|
||||
queue = get_queue('default')
|
||||
|
||||
worker = get_worker('default')
|
||||
job = queue.enqueue(self.dummy_job_default)
|
||||
worker.prepare_job_execution(job)
|
||||
|
||||
url = reverse('core-api:rqtask-stop', args=[job.id])
|
||||
self.assertEqual(job.get_status(), JobStatus.STARTED)
|
||||
response = self.client.post(reverse('core-api:rqtask-stop', args=[job.id]), **self.header)
|
||||
|
||||
# Attempt to stop the task without permission
|
||||
response = self.client.post(url, **self.header)
|
||||
self.assertEqual(response.status_code, 403)
|
||||
|
||||
# Stop the task
|
||||
self.user.is_superuser = True
|
||||
self.user.save()
|
||||
response = self.client.post(url, **self.header)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
with disable_logging():
|
||||
worker.monitor_work_horse(job, queue) # Sets the job as Failed and removes from Started
|
||||
started_job_registry = StartedJobRegistry(queue.name, connection=queue.connection)
|
||||
self.assertEqual(len(started_job_registry), 0)
|
||||
|
||||
# Verify that the task was cancelled
|
||||
canceled_job_registry = FailedJobRegistry(queue.name, connection=queue.connection)
|
||||
self.assertEqual(len(canceled_job_registry), 1)
|
||||
self.assertIn(job.id, canceled_job_registry)
|
||||
@@ -262,19 +311,34 @@ class BackgroundTaskTestCase(TestCase):
|
||||
def test_worker_list(self):
|
||||
worker1 = get_worker('default', name=uuid.uuid4().hex)
|
||||
worker1.register_birth()
|
||||
|
||||
worker2 = get_worker('high')
|
||||
worker2.register_birth()
|
||||
url = reverse('core-api:rqworker-list')
|
||||
|
||||
response = self.client.get(reverse('core-api:rqworker-list'), **self.header)
|
||||
# Attempt to fetch the worker list without permission
|
||||
response = self.client.get(url, **self.header)
|
||||
self.assertEqual(response.status_code, 403)
|
||||
|
||||
# Fetch the worker list
|
||||
self.user.is_superuser = True
|
||||
self.user.save()
|
||||
response = self.client.get(url, **self.header)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertIn(str(worker1.name), str(response.content))
|
||||
|
||||
def test_worker(self):
|
||||
worker1 = get_worker('default', name=uuid.uuid4().hex)
|
||||
worker1.register_birth()
|
||||
url = reverse('core-api:rqworker-detail', args=[worker1.name])
|
||||
|
||||
response = self.client.get(reverse('core-api:rqworker-detail', args=[worker1.name]), **self.header)
|
||||
# Attempt to fetch a worker without permission
|
||||
response = self.client.get(url, **self.header)
|
||||
self.assertEqual(response.status_code, 403)
|
||||
|
||||
# Fetch the worker
|
||||
self.user.is_superuser = True
|
||||
self.user.save()
|
||||
response = self.client.get(url, **self.header)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertIn(str(worker1.name), str(response.content))
|
||||
self.assertIn('birth_date', str(response.content))
|
||||
|
||||
@@ -158,7 +158,7 @@ class BackgroundTaskTestCase(TestCase):
|
||||
|
||||
def setUp(self):
|
||||
super().setUp()
|
||||
self.user.is_staff = True
|
||||
self.user.is_superuser = True
|
||||
self.user.is_active = True
|
||||
self.user.save()
|
||||
|
||||
@@ -171,13 +171,13 @@ class BackgroundTaskTestCase(TestCase):
|
||||
url = reverse('core:background_queue_list')
|
||||
|
||||
# Attempt to load view without permission
|
||||
self.user.is_staff = False
|
||||
self.user.is_superuser = False
|
||||
self.user.save()
|
||||
response = self.client.get(url)
|
||||
self.assertEqual(response.status_code, 403)
|
||||
|
||||
# Load view with permission
|
||||
self.user.is_staff = True
|
||||
self.user.is_superuser = True
|
||||
self.user.save()
|
||||
response = self.client.get(url)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
@@ -356,7 +356,7 @@ class SystemTestCase(TestCase):
|
||||
def setUp(self):
|
||||
super().setUp()
|
||||
|
||||
self.user.is_staff = True
|
||||
self.user.is_superuser = True
|
||||
self.user.save()
|
||||
|
||||
def test_system_view_default(self):
|
||||
|
||||
@@ -372,7 +372,7 @@ class ConfigRevisionRestoreView(ContentTypePermissionRequiredMixin, View):
class BaseRQView(UserPassesTestMixin, View):

    def test_func(self):
        return self.request.user.is_staff
        return self.request.user.is_superuser


class BackgroundQueueListView(TableMixin, BaseRQView):
@@ -555,7 +555,7 @@ class WorkerView(BaseRQView):
class SystemView(UserPassesTestMixin, View):

    def test_func(self):
        return self.request.user.is_staff
        return self.request.user.is_superuser

    def get(self, request):

@@ -638,7 +638,7 @@ class BasePluginView(UserPassesTestMixin, View):
    CACHE_KEY_CATALOG_ERROR = 'plugins-catalog-error'

    def test_func(self):
        return self.request.user.is_staff
        return self.request.user.is_superuser

    def get_cached_plugins(self, request):
        catalog_plugins = {}

@@ -1,10 +1,8 @@
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from drf_spectacular.types import OpenApiTypes
|
||||
from drf_spectacular.utils import extend_schema_field
|
||||
from rest_framework import serializers
|
||||
|
||||
from dcim.choices import *
|
||||
from dcim.constants import *
|
||||
from dcim.models import Cable, CablePath, CableTermination
|
||||
from netbox.api.fields import ChoiceField, ContentTypeField
|
||||
from netbox.api.serializers import BaseModelSerializer, GenericObjectSerializer, NetBoxModelSerializer
|
||||
@@ -51,9 +49,11 @@ class TracedCableSerializer(BaseModelSerializer):
|
||||
|
||||
class CableTerminationSerializer(NetBoxModelSerializer):
|
||||
termination_type = ContentTypeField(
|
||||
queryset=ContentType.objects.filter(CABLE_TERMINATION_MODELS)
|
||||
read_only=True,
|
||||
)
|
||||
termination = serializers.SerializerMethodField(
|
||||
read_only=True,
|
||||
)
|
||||
termination = serializers.SerializerMethodField(read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = CableTermination
|
||||
@@ -61,6 +61,8 @@ class CableTerminationSerializer(NetBoxModelSerializer):
|
||||
'id', 'url', 'display', 'cable', 'cable_end', 'termination_type', 'termination_id',
|
||||
'termination', 'created', 'last_updated',
|
||||
]
|
||||
read_only_fields = fields
|
||||
brief_fields = ('id', 'url', 'display', 'cable', 'cable_end', 'termination_type', 'termination_id')
|
||||
|
||||
@extend_schema_field(serializers.JSONField(allow_null=True))
|
||||
def get_termination(self, obj):
|
||||
|
||||
@@ -155,7 +155,7 @@ class PowerOutletTemplateSerializer(ComponentTemplateSerializer):
|
||||
model = PowerOutletTemplate
|
||||
fields = [
|
||||
'id', 'url', 'display', 'device_type', 'module_type', 'name', 'label', 'type',
|
||||
'power_port', 'feed_leg', 'description', 'created', 'last_updated',
|
||||
'color', 'power_port', 'feed_leg', 'description', 'created', 'last_updated',
|
||||
]
|
||||
brief_fields = ('id', 'url', 'display', 'name', 'description')
|
||||
|
||||
|
||||
@@ -16,7 +16,7 @@ from extras.api.mixins import ConfigContextQuerySetMixin, RenderConfigMixin
|
||||
from netbox.api.authentication import IsAuthenticatedOrLoginNotRequired
|
||||
from netbox.api.metadata import ContentTypeMetadata
|
||||
from netbox.api.pagination import StripCountAnnotationsPaginator
|
||||
from netbox.api.viewsets import NetBoxModelViewSet, MPTTLockedMixin
|
||||
from netbox.api.viewsets import NetBoxModelViewSet, MPTTLockedMixin, NetBoxReadOnlyModelViewSet
|
||||
from netbox.api.viewsets.mixins import SequentialBulkCreatesMixin
|
||||
from utilities.api import get_serializer_for_model
|
||||
from utilities.query_functions import CollateAsChar
|
||||
@@ -563,7 +563,7 @@ class CableViewSet(NetBoxModelViewSet):
|
||||
filterset_class = filtersets.CableFilterSet
|
||||
|
||||
|
||||
class CableTerminationViewSet(NetBoxModelViewSet):
|
||||
class CableTerminationViewSet(NetBoxReadOnlyModelViewSet):
|
||||
metadata_class = ContentTypeMetadata
|
||||
queryset = CableTermination.objects.all()
|
||||
serializer_class = serializers.CableTerminationSerializer
|
||||
|
||||
@@ -842,7 +842,7 @@ class PowerOutletTemplateFilterSet(ChangeLoggedModelFilterSet, ModularDeviceType
|
||||
|
||||
class Meta:
|
||||
model = PowerOutletTemplate
|
||||
fields = ('id', 'name', 'label', 'type', 'feed_leg', 'description')
|
||||
fields = ('id', 'name', 'label', 'type', 'color', 'feed_leg', 'description')
|
||||
|
||||
|
||||
class InterfaceTemplateFilterSet(ChangeLoggedModelFilterSet, ModularDeviceTypeComponentFilterSet):
|
||||
@@ -1764,6 +1764,7 @@ class PowerOutletFilterSet(
|
||||
|
||||
class MACAddressFilterSet(NetBoxModelFilterSet):
|
||||
mac_address = MultiValueMACAddressFilter()
|
||||
assigned_object_type = ContentTypeFilter()
|
||||
device = MultiValueCharFilter(
|
||||
method='filter_device',
|
||||
field_name='name',
|
||||
|
||||
@@ -1163,6 +1163,10 @@ class PowerOutletTemplateBulkEditForm(ComponentTemplateBulkEditForm):
|
||||
choices=add_blank_choice(PowerOutletTypeChoices),
|
||||
required=False
|
||||
)
|
||||
color = ColorField(
|
||||
label=_('Color'),
|
||||
required=False
|
||||
)
|
||||
power_port = forms.ModelChoiceField(
|
||||
label=_('Power port'),
|
||||
queryset=PowerPortTemplate.objects.all(),
|
||||
|
||||
@@ -1092,14 +1092,14 @@ class PowerOutletTemplateForm(ModularComponentTemplateForm):
|
||||
FieldSet('device_type', name=_('Device Type')),
|
||||
FieldSet('module_type', name=_('Module Type')),
|
||||
),
|
||||
'name', 'label', 'type', 'power_port', 'feed_leg', 'description',
|
||||
'name', 'label', 'type', 'color', 'power_port', 'feed_leg', 'description',
|
||||
),
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = PowerOutletTemplate
|
||||
fields = [
|
||||
'device_type', 'module_type', 'name', 'label', 'type', 'power_port', 'feed_leg', 'description',
|
||||
'device_type', 'module_type', 'name', 'label', 'type', 'color', 'power_port', 'feed_leg', 'description',
|
||||
]
|
||||
|
||||
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
from strawberry.types import Info
|
||||
|
||||
from circuits.graphql.types import CircuitTerminationType, ProviderNetworkType
|
||||
from circuits.models import CircuitTermination, ProviderNetwork
|
||||
from dcim.graphql.types import (
|
||||
@@ -49,7 +51,7 @@ class InventoryItemTemplateComponentType:
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def resolve_type(cls, instance, info):
|
||||
def resolve_type(cls, instance, info: Info):
|
||||
if type(instance) is ConsolePortTemplate:
|
||||
return ConsolePortTemplateType
|
||||
if type(instance) is ConsoleServerPortTemplate:
|
||||
@@ -79,7 +81,7 @@ class InventoryItemComponentType:
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def resolve_type(cls, instance, info):
|
||||
def resolve_type(cls, instance, info: Info):
|
||||
if type(instance) is ConsolePort:
|
||||
return ConsolePortType
|
||||
if type(instance) is ConsoleServerPort:
|
||||
@@ -112,7 +114,7 @@ class ConnectedEndpointType:
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def resolve_type(cls, instance, info):
|
||||
def resolve_type(cls, instance, info: Info):
|
||||
if type(instance) is CircuitTermination:
|
||||
return CircuitTerminationType
|
||||
if type(instance) is ConsolePortType:
|
||||
|
||||
@@ -673,6 +673,7 @@ class PowerOutletType(ModularComponentType, CabledObjectMixin, PathEndpointMixin
|
||||
)
|
||||
class PowerOutletTemplateType(ModularComponentTemplateType):
|
||||
power_port: Annotated["PowerPortTemplateType", strawberry.lazy('dcim.graphql.types')] | None
|
||||
color: str
|
||||
|
||||
|
||||
@strawberry_django.type(
|
||||
|
||||
netbox/dcim/migrations/0216_poweroutlettemplate_color.py (new file)
@@ -0,0 +1,17 @@
import utilities.fields
from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('dcim', '0215_rackreservation_status'),
    ]

    operations = [
        migrations.AddField(
            model_name='poweroutlettemplate',
            name='color',
            field=utilities.fields.ColorField(blank=True, max_length=6),
        ),
    ]
netbox/dcim/migrations/0217_ci_collations.py (new file)
@@ -0,0 +1,26 @@
from django.contrib.postgres.operations import CreateCollation
from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('dcim', '0216_poweroutlettemplate_color'),
    ]

    operations = [
        # Create a case-insensitive collation
        CreateCollation(
            'case_insensitive',
            provider='icu',
            locale='und-u-ks-level2',
            deterministic=False,
        ),
        # Create a case-insensitive collation with natural sorting
        CreateCollation(
            'ci_natural_sort',
            provider='icu',
            locale='und-u-kn-true-ks-level2',
            deterministic=False,
        ),
    ]
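These two ICU collations underpin the per-column db_collation changes in the models below: the 'ks-level2' keyword makes comparisons ignore case, 'kn-true' adds numeric ("natural") ordering, and deterministic=False is what allows PostgreSQL to treat differently-cased strings as equal. A minimal ORM sketch of the resulting behavior (hypothetical data; assumes the migrations in this branch have been applied):

    from dcim.models import Site

    Site.objects.create(name='Main Campus', slug='main-campus')

    # Equality on a ci_natural_sort column is case-insensitive at the database
    # level, with no need for __iexact or Lower():
    Site.objects.filter(name='MAIN CAMPUS').exists()   # True

    # Ordering is numeric-aware, so 'eth2' sorts before 'eth10' on columns
    # that use this collation.

Because these collations are non-deterministic, PostgreSQL will not use them for LIKE-style pattern matching, which is presumably why the 0218 migration below starts by dropping the *_like (varchar_pattern_ops) indexes.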
netbox/dcim/migrations/0218_ci_collations.py (new file)
@@ -0,0 +1,311 @@
|
||||
from django.db import migrations, models
|
||||
|
||||
PATTERN_OPS_INDEXES = [
|
||||
'dcim_devicerole_slug_7952643b_like',
|
||||
'dcim_devicetype_slug_448745bd_like',
|
||||
'dcim_inventoryitemrole_name_4c8cfe6d_like',
|
||||
'dcim_inventoryitemrole_slug_3556c227_like',
|
||||
'dcim_location_slug_352c5472_like',
|
||||
'dcim_manufacturer_name_841fcd92_like',
|
||||
'dcim_manufacturer_slug_00430749_like',
|
||||
'dcim_moduletypeprofile_name_1709c36e_like',
|
||||
'dcim_platform_slug_b0908ae4_like',
|
||||
'dcim_rackrole_name_9077cfcc_like',
|
||||
'dcim_rackrole_slug_40bbcd3a_like',
|
||||
'dcim_racktype_slug_6bbb384a_like',
|
||||
'dcim_region_slug_ff078a66_like',
|
||||
'dcim_site_name_8fe66c76_like',
|
||||
'dcim_site_slug_4412c762_like',
|
||||
'dcim_sitegroup_slug_a11d2b04_like',
|
||||
]
|
||||
|
||||
|
||||
def remove_indexes(apps, schema_editor):
|
||||
for idx in PATTERN_OPS_INDEXES:
|
||||
schema_editor.execute(f'DROP INDEX IF EXISTS {idx}')
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
('dcim', '0217_ci_collations'),
|
||||
('extras', '0134_ci_collations'),
|
||||
('ipam', '0083_ci_collations'),
|
||||
('tenancy', '0021_ci_collations'),
|
||||
('virtualization', '0048_populate_mac_addresses'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(
|
||||
code=remove_indexes,
|
||||
reverse_code=migrations.RunPython.noop,
|
||||
),
|
||||
migrations.RemoveConstraint(
|
||||
model_name='device',
|
||||
name='dcim_device_unique_name_site_tenant',
|
||||
),
|
||||
migrations.RemoveConstraint(
|
||||
model_name='device',
|
||||
name='dcim_device_unique_name_site',
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='consoleport',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=64),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='consoleporttemplate',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=64),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='consoleserverport',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=64),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='consoleserverporttemplate',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=64),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='device',
|
||||
name='name',
|
||||
field=models.CharField(blank=True, db_collation='ci_natural_sort', max_length=64, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='devicebay',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=64),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='devicebaytemplate',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=64),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='devicerole',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=100),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='devicerole',
|
||||
name='slug',
|
||||
field=models.SlugField(db_collation='case_insensitive', max_length=100),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='devicetype',
|
||||
name='model',
|
||||
field=models.CharField(db_collation='case_insensitive', max_length=100),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='devicetype',
|
||||
name='slug',
|
||||
field=models.SlugField(db_collation='case_insensitive', max_length=100),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='frontport',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=64),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='frontporttemplate',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=64),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='interface',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=64),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='interfacetemplate',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=64),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='inventoryitem',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=64),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='inventoryitemrole',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=100, unique=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='inventoryitemrole',
|
||||
name='slug',
|
||||
field=models.SlugField(db_collation='case_insensitive', max_length=100, unique=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='inventoryitemtemplate',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=64),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='location',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=100),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='location',
|
||||
name='slug',
|
||||
field=models.SlugField(db_collation='case_insensitive', max_length=100),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='manufacturer',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=100, unique=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='manufacturer',
|
||||
name='slug',
|
||||
field=models.SlugField(db_collation='case_insensitive', max_length=100, unique=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='modulebay',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=64),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='modulebaytemplate',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=64),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='moduletype',
|
||||
name='model',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=100),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='moduletypeprofile',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=100, unique=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='platform',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=100),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='platform',
|
||||
name='slug',
|
||||
field=models.SlugField(db_collation='case_insensitive', max_length=100),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='powerfeed',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=100),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='poweroutlet',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=64),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='poweroutlettemplate',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=64),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='powerpanel',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=100),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='powerport',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=64),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='powerporttemplate',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=64),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='rack',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=100),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='rackrole',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=100, unique=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='rackrole',
|
||||
name='slug',
|
||||
field=models.SlugField(db_collation='case_insensitive', max_length=100, unique=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='racktype',
|
||||
name='model',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=100),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='racktype',
|
||||
name='slug',
|
||||
field=models.SlugField(db_collation='case_insensitive', max_length=100, unique=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='rearport',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=64),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='rearporttemplate',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=64),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='region',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=100),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='region',
|
||||
name='slug',
|
||||
field=models.SlugField(db_collation='case_insensitive', max_length=100),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='site',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=100, unique=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='site',
|
||||
name='slug',
|
||||
field=models.SlugField(db_collation='case_insensitive', max_length=100, unique=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='sitegroup',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=100),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='sitegroup',
|
||||
name='slug',
|
||||
field=models.SlugField(db_collation='case_insensitive', max_length=100),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='virtualdevicecontext',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=64),
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name='device',
|
||||
constraint=models.UniqueConstraint(
|
||||
models.F('name'), models.F('site'), models.F('tenant'), name='dcim_device_unique_name_site_tenant'
|
||||
),
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name='device',
|
||||
constraint=models.UniqueConstraint(
|
||||
models.F('name'),
|
||||
models.F('site'),
|
||||
condition=models.Q(('tenant__isnull', True)),
|
||||
name='dcim_device_unique_name_site',
|
||||
violation_error_message='Device name must be unique per site.',
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -7,6 +7,7 @@ from mptt.models import MPTTModel, TreeForeignKey
|
||||
|
||||
from dcim.choices import *
|
||||
from dcim.constants import *
|
||||
from dcim.models.mixins import InterfaceValidationMixin
|
||||
from netbox.models import ChangeLoggedModel
|
||||
from utilities.fields import ColorField, NaturalOrderingField
|
||||
from utilities.mptt import TreeManager
|
||||
@@ -42,10 +43,10 @@ class ComponentTemplateModel(ChangeLoggedModel, TrackingModelMixin):
|
||||
name = models.CharField(
|
||||
verbose_name=_('name'),
|
||||
max_length=64,
|
||||
db_collation='ci_natural_sort',
|
||||
help_text=_(
|
||||
"{module} is accepted as a substitution for the module bay position when attached to a module type."
|
||||
),
|
||||
db_collation="natural_sort"
|
||||
)
|
||||
label = models.CharField(
|
||||
verbose_name=_('label'),
|
||||
@@ -338,6 +339,10 @@ class PowerOutletTemplate(ModularComponentTemplateModel):
|
||||
blank=True,
|
||||
null=True
|
||||
)
|
||||
color = ColorField(
|
||||
verbose_name=_('color'),
|
||||
blank=True
|
||||
)
|
||||
power_port = models.ForeignKey(
|
||||
to='dcim.PowerPortTemplate',
|
||||
on_delete=models.SET_NULL,
|
||||
@@ -388,6 +393,7 @@ class PowerOutletTemplate(ModularComponentTemplateModel):
|
||||
name=self.resolve_name(kwargs.get('module')),
|
||||
label=self.resolve_label(kwargs.get('module')),
|
||||
type=self.type,
|
||||
color=self.color,
|
||||
power_port=power_port,
|
||||
feed_leg=self.feed_leg,
|
||||
**kwargs
|
||||
@@ -398,6 +404,7 @@ class PowerOutletTemplate(ModularComponentTemplateModel):
|
||||
return {
|
||||
'name': self.name,
|
||||
'type': self.type,
|
||||
'color': self.color,
|
||||
'power_port': self.power_port.name if self.power_port else None,
|
||||
'feed_leg': self.feed_leg,
|
||||
'label': self.label,
|
||||
@@ -405,7 +412,7 @@ class PowerOutletTemplate(ModularComponentTemplateModel):
|
||||
}
|
||||
|
||||
|
||||
class InterfaceTemplate(ModularComponentTemplateModel):
|
||||
class InterfaceTemplate(InterfaceValidationMixin, ModularComponentTemplateModel):
|
||||
"""
|
||||
A template for a physical data interface on a new Device.
|
||||
"""
|
||||
@@ -469,8 +476,6 @@ class InterfaceTemplate(ModularComponentTemplateModel):
|
||||
super().clean()
|
||||
|
||||
if self.bridge:
|
||||
if self.pk and self.bridge_id == self.pk:
|
||||
raise ValidationError({'bridge': _("An interface cannot be bridged to itself.")})
|
||||
if self.device_type and self.device_type != self.bridge.device_type:
|
||||
raise ValidationError({
|
||||
'bridge': _(
|
||||
@@ -484,11 +489,6 @@ class InterfaceTemplate(ModularComponentTemplateModel):
|
||||
).format(bridge=self.bridge)
|
||||
})
|
||||
|
||||
if self.rf_role and self.type not in WIRELESS_IFACE_TYPES:
|
||||
raise ValidationError({
|
||||
'rf_role': "Wireless role may be set only on wireless interfaces."
|
||||
})
|
||||
|
||||
def instantiate(self, **kwargs):
|
||||
return self.component_model(
|
||||
name=self.resolve_name(kwargs.get('module')),
|
||||
|
||||
@@ -11,6 +11,7 @@ from mptt.models import MPTTModel, TreeForeignKey
|
||||
from dcim.choices import *
|
||||
from dcim.constants import *
|
||||
from dcim.fields import WWNField
|
||||
from dcim.models.mixins import InterfaceValidationMixin
|
||||
from netbox.choices import ColorChoices
|
||||
from netbox.models import OrganizationalModel, NetBoxModel
|
||||
from utilities.fields import ColorField, NaturalOrderingField
|
||||
@@ -51,7 +52,7 @@ class ComponentModel(NetBoxModel):
|
||||
name = models.CharField(
|
||||
verbose_name=_('name'),
|
||||
max_length=64,
|
||||
db_collation="natural_sort"
|
||||
db_collation='ci_natural_sort',
|
||||
)
|
||||
label = models.CharField(
|
||||
verbose_name=_('label'),
|
||||
@@ -676,7 +677,14 @@ class BaseInterface(models.Model):
|
||||
return self.primary_mac_address.mac_address
|
||||
|
||||
|
||||
class Interface(ModularComponentModel, BaseInterface, CabledObjectModel, PathEndpoint, TrackingModelMixin):
|
||||
class Interface(
|
||||
InterfaceValidationMixin,
|
||||
ModularComponentModel,
|
||||
BaseInterface,
|
||||
CabledObjectModel,
|
||||
PathEndpoint,
|
||||
TrackingModelMixin,
|
||||
):
|
||||
"""
|
||||
A network interface within a Device. A physical Interface can connect to exactly one other Interface.
|
||||
"""
|
||||
@@ -893,10 +901,6 @@ class Interface(ModularComponentModel, BaseInterface, CabledObjectModel, PathEnd
|
||||
|
||||
# Bridge validation
|
||||
|
||||
# An interface cannot be bridged to itself
|
||||
if self.pk and self.bridge_id == self.pk:
|
||||
raise ValidationError({'bridge': _("An interface cannot be bridged to itself.")})
|
||||
|
||||
# A bridged interface belongs to the same device or virtual chassis
|
||||
if self.bridge and self.bridge.device != self.device:
|
||||
if self.device.virtual_chassis is None:
|
||||
@@ -942,29 +946,9 @@ class Interface(ModularComponentModel, BaseInterface, CabledObjectModel, PathEnd
|
||||
)
|
||||
})
|
||||
|
||||
# PoE validation
|
||||
|
||||
# Only physical interfaces may have a PoE mode/type assigned
|
||||
if self.poe_mode and self.is_virtual:
|
||||
raise ValidationError({
|
||||
'poe_mode': _("Virtual interfaces cannot have a PoE mode.")
|
||||
})
|
||||
if self.poe_type and self.is_virtual:
|
||||
raise ValidationError({
|
||||
'poe_type': _("Virtual interfaces cannot have a PoE type.")
|
||||
})
|
||||
|
||||
# An interface with a PoE type set must also specify a mode
|
||||
if self.poe_type and not self.poe_mode:
|
||||
raise ValidationError({
|
||||
'poe_type': _("Must specify PoE mode when designating a PoE type.")
|
||||
})
|
||||
|
||||
# Wireless validation
|
||||
|
||||
# RF role & channel may only be set for wireless interfaces
|
||||
if self.rf_role and not self.is_wireless:
|
||||
raise ValidationError({'rf_role': _("Wireless role may be set only on wireless interfaces.")})
|
||||
# RF channel may only be set for wireless interfaces
|
||||
if self.rf_channel and not self.is_wireless:
|
||||
raise ValidationError({'rf_channel': _("Channel may be set only on wireless interfaces.")})
|
||||
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
import decimal
|
||||
import yaml
|
||||
|
||||
from functools import cached_property
|
||||
|
||||
import yaml
|
||||
from django.contrib.contenttypes.fields import GenericForeignKey, GenericRelation
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.core.exceptions import ValidationError
|
||||
@@ -10,7 +9,6 @@ from django.core.files.storage import default_storage
|
||||
from django.core.validators import MaxValueValidator, MinValueValidator
|
||||
from django.db import models
|
||||
from django.db.models import F, ProtectedError, prefetch_related_objects
|
||||
from django.db.models.functions import Lower
|
||||
from django.db.models.signals import post_save
|
||||
from django.urls import reverse
|
||||
from django.utils.safestring import mark_safe
|
||||
@@ -25,8 +23,8 @@ from extras.querysets import ConfigContextModelQuerySet
|
||||
from netbox.choices import ColorChoices
|
||||
from netbox.config import ConfigItem
|
||||
from netbox.models import NestedGroupModel, OrganizationalModel, PrimaryModel
|
||||
from netbox.models.mixins import WeightMixin
|
||||
from netbox.models.features import ContactsMixin, ImageAttachmentsMixin
|
||||
from netbox.models.mixins import WeightMixin
|
||||
from utilities.fields import ColorField, CounterCacheField
|
||||
from utilities.prefetch import get_prefetchable_fields
|
||||
from utilities.tracking import TrackingModelMixin
|
||||
@@ -34,7 +32,6 @@ from .device_components import *
|
||||
from .mixins import RenderConfigMixin
|
||||
from .modules import Module
|
||||
|
||||
|
||||
__all__ = (
|
||||
'Device',
|
||||
'DeviceRole',
|
||||
@@ -83,11 +80,13 @@ class DeviceType(ImageAttachmentsMixin, PrimaryModel, WeightMixin):
|
||||
)
|
||||
model = models.CharField(
|
||||
verbose_name=_('model'),
|
||||
max_length=100
|
||||
max_length=100,
|
||||
db_collation='case_insensitive',
|
||||
)
|
||||
slug = models.SlugField(
|
||||
verbose_name=_('slug'),
|
||||
max_length=100
|
||||
max_length=100,
|
||||
db_collation='case_insensitive',
|
||||
)
|
||||
default_platform = models.ForeignKey(
|
||||
to='dcim.Platform',
|
||||
@@ -525,7 +524,7 @@ class Device(
|
||||
max_length=64,
|
||||
blank=True,
|
||||
null=True,
|
||||
db_collation="natural_sort"
|
||||
db_collation='ci_natural_sort',
|
||||
)
|
||||
serial = models.CharField(
|
||||
max_length=50,
|
||||
@@ -721,11 +720,11 @@ class Device(
|
||||
ordering = ('name', 'pk') # Name may be null
|
||||
constraints = (
|
||||
models.UniqueConstraint(
|
||||
Lower('name'), 'site', 'tenant',
|
||||
'name', 'site', 'tenant',
|
||||
name='%(app_label)s_%(class)s_unique_name_site_tenant'
|
||||
),
|
||||
models.UniqueConstraint(
|
||||
Lower('name'), 'site',
|
||||
'name', 'site',
|
||||
name='%(app_label)s_%(class)s_unique_name_site',
|
||||
condition=Q(tenant__isnull=True),
|
||||
violation_error_message=_("Device name must be unique per site.")
|
||||
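Dropping Lower() from these constraints does not weaken them: with Device.name now collated as 'ci_natural_sort' (non-deterministic, case-insensitive), plain equality, and therefore uniqueness, already ignores case at the database level. A minimal sketch (hypothetical objects; the site, device_type, and role are assumed to exist, and no tenant is set):

    Device.objects.create(name='Router-1', site=site, device_type=dt, role=role)
    Device.objects.create(name='ROUTER-1', site=site, device_type=dt, role=role)
    # IntegrityError: duplicate key violates "dcim_device_unique_name_site"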
@@ -1119,7 +1118,7 @@ class VirtualChassis(PrimaryModel):
|
||||
name = models.CharField(
|
||||
verbose_name=_('name'),
|
||||
max_length=64,
|
||||
db_collation="natural_sort"
|
||||
db_collation='natural_sort',
|
||||
)
|
||||
domain = models.CharField(
|
||||
verbose_name=_('domain'),
|
||||
@@ -1182,7 +1181,7 @@ class VirtualDeviceContext(PrimaryModel):
|
||||
name = models.CharField(
|
||||
verbose_name=_('name'),
|
||||
max_length=64,
|
||||
db_collation="natural_sort"
|
||||
db_collation='ci_natural_sort',
|
||||
)
|
||||
status = models.CharField(
|
||||
verbose_name=_('status'),
|
||||
|
||||
@@ -4,8 +4,11 @@ from django.core.exceptions import ValidationError
from django.db import models
from django.utils.translation import gettext_lazy as _

from dcim.constants import VIRTUAL_IFACE_TYPES, WIRELESS_IFACE_TYPES

__all__ = (
    'CachedScopeMixin',
    'InterfaceValidationMixin',
    'RenderConfigMixin',
)

@@ -116,3 +119,33 @@ class CachedScopeMixin(models.Model):
                self._site = self.scope.site
                self._location = self.scope
    cache_related_objects.alters_data = True


class InterfaceValidationMixin:

    def clean(self):
        super().clean()

        # An interface cannot be bridged to itself
        if self.pk and self.bridge_id == self.pk:
            raise ValidationError({'bridge': _("An interface cannot be bridged to itself.")})

        # Only physical interfaces may have a PoE mode/type assigned
        if self.poe_mode and self.type in VIRTUAL_IFACE_TYPES:
            raise ValidationError({
                'poe_mode': _("Virtual interfaces cannot have a PoE mode.")
            })
        if self.poe_type and self.type in VIRTUAL_IFACE_TYPES:
            raise ValidationError({
                'poe_type': _("Virtual interfaces cannot have a PoE type.")
            })

        # An interface with a PoE type set must also specify a mode
        if self.poe_type and not self.poe_mode:
            raise ValidationError({
                'poe_type': _("Must specify PoE mode when designating a PoE type.")
            })

        # RF role may be set only for wireless interfaces
        if self.rf_role and self.type not in WIRELESS_IFACE_TYPES:
            raise ValidationError({'rf_role': _("Wireless role may be set only on wireless interfaces.")})
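Because the mixin is shared by both Interface and InterfaceTemplate (see the class definitions elsewhere in this diff), template objects now get the same bridge, PoE, and wireless checks as concrete interfaces. A rough sketch of the effect (hypothetical values; the choice strings are assumptions, not taken from this change):

    template = InterfaceTemplate(
        device_type=device_type,            # an existing DeviceType (assumed)
        name='veth0',
        type='virtual',                     # assumed value of InterfaceTypeChoices.TYPE_VIRTUAL
        poe_type='type1-ieee802.3af',       # assumed PoE type choice value
    )
    template.full_clean()                   # raises ValidationError on 'poe_type'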
@@ -31,7 +31,8 @@ class ModuleTypeProfile(PrimaryModel):
|
||||
name = models.CharField(
|
||||
verbose_name=_('name'),
|
||||
max_length=100,
|
||||
unique=True
|
||||
unique=True,
|
||||
db_collation='ci_natural_sort',
|
||||
)
|
||||
schema = models.JSONField(
|
||||
blank=True,
|
||||
@@ -72,7 +73,8 @@ class ModuleType(ImageAttachmentsMixin, PrimaryModel, WeightMixin):
|
||||
)
|
||||
model = models.CharField(
|
||||
verbose_name=_('model'),
|
||||
max_length=100
|
||||
max_length=100,
|
||||
db_collation='ci_natural_sort',
|
||||
)
|
||||
part_number = models.CharField(
|
||||
verbose_name=_('part number'),
|
||||
|
||||
@@ -37,7 +37,7 @@ class PowerPanel(ContactsMixin, ImageAttachmentsMixin, PrimaryModel):
|
||||
name = models.CharField(
|
||||
verbose_name=_('name'),
|
||||
max_length=100,
|
||||
db_collation="natural_sort"
|
||||
db_collation='ci_natural_sort',
|
||||
)
|
||||
|
||||
prerequisite_models = (
|
||||
@@ -88,7 +88,7 @@ class PowerFeed(PrimaryModel, PathEndpoint, CabledObjectModel):
|
||||
name = models.CharField(
|
||||
verbose_name=_('name'),
|
||||
max_length=100,
|
||||
db_collation="natural_sort"
|
||||
db_collation='ci_natural_sort',
|
||||
)
|
||||
status = models.CharField(
|
||||
verbose_name=_('status'),
|
||||
|
||||
@@ -137,12 +137,14 @@ class RackType(RackBase):
|
||||
)
|
||||
model = models.CharField(
|
||||
verbose_name=_('model'),
|
||||
max_length=100
|
||||
max_length=100,
|
||||
db_collation='ci_natural_sort',
|
||||
)
|
||||
slug = models.SlugField(
|
||||
verbose_name=_('slug'),
|
||||
max_length=100,
|
||||
unique=True
|
||||
unique=True,
|
||||
db_collation='case_insensitive',
|
||||
)
|
||||
|
||||
clone_fields = (
|
||||
@@ -262,7 +264,7 @@ class Rack(ContactsMixin, ImageAttachmentsMixin, RackBase):
|
||||
name = models.CharField(
|
||||
verbose_name=_('name'),
|
||||
max_length=100,
|
||||
db_collation="natural_sort"
|
||||
db_collation='ci_natural_sort',
|
||||
)
|
||||
facility_id = models.CharField(
|
||||
max_length=50,
|
||||
|
||||
@@ -142,13 +142,14 @@ class Site(ContactsMixin, ImageAttachmentsMixin, PrimaryModel):
|
||||
verbose_name=_('name'),
|
||||
max_length=100,
|
||||
unique=True,
|
||||
help_text=_("Full name of the site"),
|
||||
db_collation="natural_sort"
|
||||
db_collation='ci_natural_sort',
|
||||
help_text=_("Full name of the site")
|
||||
)
|
||||
slug = models.SlugField(
|
||||
verbose_name=_('slug'),
|
||||
max_length=100,
|
||||
unique=True
|
||||
unique=True,
|
||||
db_collation='case_insensitive',
|
||||
)
|
||||
status = models.CharField(
|
||||
verbose_name=_('status'),
|
||||
|
||||
@@ -196,7 +196,7 @@ class DeviceTable(TenancyColumnsMixin, ContactsColumnMixin, NetBoxTable):
|
||||
verbose_name=_('Type')
|
||||
)
|
||||
u_height = columns.TemplateColumn(
|
||||
accessor=tables.A('device_type.u_height'),
|
||||
accessor=tables.A('device_type__u_height'),
|
||||
verbose_name=_('U Height'),
|
||||
template_code='{{ value|floatformat }}'
|
||||
)
|
||||
|
||||
@@ -211,6 +211,9 @@ class PowerPortTemplateTable(ComponentTemplateTable):
|
||||
|
||||
|
||||
class PowerOutletTemplateTable(ComponentTemplateTable):
|
||||
color = columns.ColorColumn(
|
||||
verbose_name=_('Color'),
|
||||
)
|
||||
actions = columns.ActionsColumn(
|
||||
actions=('edit', 'delete'),
|
||||
extra_buttons=MODULAR_COMPONENT_TEMPLATE_BUTTONS
|
||||
@@ -218,7 +221,7 @@ class PowerOutletTemplateTable(ComponentTemplateTable):
|
||||
|
||||
class Meta(ComponentTemplateTable.Meta):
|
||||
model = models.PowerOutletTemplate
|
||||
fields = ('pk', 'name', 'label', 'type', 'power_port', 'feed_leg', 'description', 'actions')
|
||||
fields = ('pk', 'name', 'label', 'type', 'color', 'power_port', 'feed_leg', 'description', 'actions')
|
||||
empty_text = "None"
|
||||
|
||||
|
||||
|
||||
@@ -13,7 +13,8 @@ from ipam.choices import VLANQinQRoleChoices
|
||||
from ipam.models import ASN, RIR, VLAN, VRF
|
||||
from netbox.api.serializers import GenericObjectSerializer
|
||||
from tenancy.models import Tenant
|
||||
from users.models import User
|
||||
from users.constants import TOKEN_PREFIX
|
||||
from users.models import Token, User
|
||||
from utilities.testing import APITestCase, APIViewTestCases, create_test_device, disable_logging
|
||||
from virtualization.models import Cluster, ClusterType
|
||||
from wireless.choices import WirelessChannelChoices
|
||||
@@ -1306,7 +1307,6 @@ class DeviceTest(APIViewTestCases.APIViewTestCase):
|
||||
}
|
||||
user_permissions = (
|
||||
'dcim.view_site', 'dcim.view_rack', 'dcim.view_location', 'dcim.view_devicerole', 'dcim.view_devicetype',
|
||||
'extras.view_configtemplate',
|
||||
)
|
||||
|
||||
@classmethod
|
||||
@@ -1486,12 +1486,58 @@ class DeviceTest(APIViewTestCases.APIViewTestCase):
|
||||
device.config_template = configtemplate
|
||||
device.save()
|
||||
|
||||
self.add_permissions('dcim.add_device')
|
||||
url = reverse('dcim-api:device-detail', kwargs={'pk': device.pk}) + 'render-config/'
|
||||
self.add_permissions('dcim.render_config_device', 'dcim.view_device')
|
||||
url = reverse('dcim-api:device-render-config', kwargs={'pk': device.pk})
|
||||
response = self.client.post(url, {}, format='json', **self.header)
|
||||
self.assertHttpStatus(response, status.HTTP_200_OK)
|
||||
self.assertEqual(response.data['content'], f'Config for device {device.name}')
|
||||
|
||||
def test_render_config_without_permission(self):
|
||||
configtemplate = ConfigTemplate.objects.create(
|
||||
name='Config Template 1',
|
||||
template_code='Config for device {{ device.name }}'
|
||||
)
|
||||
|
||||
device = Device.objects.first()
|
||||
device.config_template = configtemplate
|
||||
device.save()
|
||||
|
||||
# No permissions added - user has no render_config permission
|
||||
url = reverse('dcim-api:device-render-config', kwargs={'pk': device.pk})
|
||||
response = self.client.post(url, {}, format='json', **self.header)
|
||||
self.assertHttpStatus(response, status.HTTP_404_NOT_FOUND)
|
||||
|
||||
def test_render_config_token_write_enabled(self):
|
||||
configtemplate = ConfigTemplate.objects.create(
|
||||
name='Config Template 1',
|
||||
template_code='Config for device {{ device.name }}'
|
||||
)
|
||||
|
||||
device = Device.objects.first()
|
||||
device.config_template = configtemplate
|
||||
device.save()
|
||||
|
||||
self.add_permissions('dcim.render_config_device', 'dcim.view_device')
|
||||
url = reverse('dcim-api:device-render-config', kwargs={'pk': device.pk})
|
||||
|
||||
# Request without token auth should fail with PermissionDenied
|
||||
response = self.client.post(url, {}, format='json')
|
||||
self.assertHttpStatus(response, status.HTTP_403_FORBIDDEN)
|
||||
|
||||
# Create token with write_enabled=False
|
||||
token = Token.objects.create(version=2, user=self.user, write_enabled=False)
|
||||
token_header = f'Bearer {TOKEN_PREFIX}{token.key}.{token.token}'
|
||||
|
||||
# Request with write-disabled token should fail
|
||||
response = self.client.post(url, {}, format='json', HTTP_AUTHORIZATION=token_header)
|
||||
self.assertHttpStatus(response, status.HTTP_403_FORBIDDEN)
|
||||
|
||||
# Enable write and retry
|
||||
token.write_enabled = True
|
||||
token.save()
|
||||
response = self.client.post(url, {}, format='json', HTTP_AUTHORIZATION=token_header)
|
||||
self.assertHttpStatus(response, status.HTTP_200_OK)
|
||||
|
||||
|
||||
class ModuleTest(APIViewTestCases.APIViewTestCase):
|
||||
model = Module
|
||||
@@ -2376,6 +2422,33 @@ class CableTest(APIViewTestCases.APIViewTestCase):
|
||||
]
|
||||
|
||||
|
||||
class CableTerminationTest(
|
||||
APIViewTestCases.GetObjectViewTestCase,
|
||||
APIViewTestCases.ListObjectsViewTestCase,
|
||||
):
|
||||
model = CableTermination
|
||||
brief_fields = ['cable', 'cable_end', 'display', 'id', 'termination_id', 'termination_type', 'url']
|
||||
|
||||
@classmethod
|
||||
def setUpTestData(cls):
|
||||
device1 = create_test_device('Device 1')
|
||||
device2 = create_test_device('Device 2')
|
||||
|
||||
interfaces = []
|
||||
for device in (device1, device2):
|
||||
for i in range(0, 10):
|
||||
interfaces.append(Interface(device=device, type=InterfaceTypeChoices.TYPE_1GE_FIXED, name=f'eth{i}'))
|
||||
Interface.objects.bulk_create(interfaces)
|
||||
|
||||
cables = (
|
||||
Cable(a_terminations=[interfaces[0]], b_terminations=[interfaces[10]], label='Cable 1'),
|
||||
Cable(a_terminations=[interfaces[1]], b_terminations=[interfaces[11]], label='Cable 2'),
|
||||
Cable(a_terminations=[interfaces[2]], b_terminations=[interfaces[12]], label='Cable 3'),
|
||||
)
|
||||
for cable in cables:
|
||||
cable.save()
|
||||
|
||||
|
||||
class ConnectedDeviceTest(APITestCase):
|
||||
|
||||
@classmethod
|
||||
|
||||
@@ -1919,18 +1919,21 @@ class PowerOutletTemplateTestCase(TestCase, DeviceComponentTemplateFilterSetTest
|
||||
device_type=device_types[0],
|
||||
name='Power Outlet 1',
|
||||
feed_leg=PowerOutletFeedLegChoices.FEED_LEG_A,
|
||||
color=ColorChoices.COLOR_RED,
|
||||
description='foobar1'
|
||||
),
|
||||
PowerOutletTemplate(
|
||||
device_type=device_types[1],
|
||||
name='Power Outlet 2',
|
||||
feed_leg=PowerOutletFeedLegChoices.FEED_LEG_B,
|
||||
color=ColorChoices.COLOR_GREEN,
|
||||
description='foobar2'
|
||||
),
|
||||
PowerOutletTemplate(
|
||||
device_type=device_types[2],
|
||||
name='Power Outlet 3',
|
||||
feed_leg=PowerOutletFeedLegChoices.FEED_LEG_C,
|
||||
color=ColorChoices.COLOR_BLUE,
|
||||
description='foobar3'
|
||||
),
|
||||
))
|
||||
@@ -1943,6 +1946,10 @@ class PowerOutletTemplateTestCase(TestCase, DeviceComponentTemplateFilterSetTest
|
||||
params = {'feed_leg': [PowerOutletFeedLegChoices.FEED_LEG_A, PowerOutletFeedLegChoices.FEED_LEG_B]}
|
||||
self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2)
|
||||
|
||||
def test_color(self):
|
||||
params = {'color': [ColorChoices.COLOR_RED, ColorChoices.COLOR_GREEN]}
|
||||
self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2)
|
||||
|
||||
|
||||
class InterfaceTemplateTestCase(TestCase, DeviceComponentTemplateFilterSetTests, ChangeLoggedFilterSetTests):
|
||||
queryset = InterfaceTemplate.objects.all()
|
||||
|
||||
@@ -7,13 +7,14 @@ from django.test import override_settings, tag
|
||||
from django.urls import reverse
|
||||
from netaddr import EUI
|
||||
|
||||
from core.models import ObjectType
|
||||
from dcim.choices import *
|
||||
from dcim.constants import *
|
||||
from dcim.models import *
|
||||
from ipam.models import ASN, RIR, VLAN, VRF
|
||||
from netbox.choices import CSVDelimiterChoices, ImportFormatChoices, WeightUnitChoices
|
||||
from tenancy.models import Tenant
|
||||
from users.models import User
|
||||
from users.models import ObjectPermission, User
|
||||
from utilities.testing import ViewTestCases, create_tags, create_test_device, post_data
|
||||
from wireless.models import WirelessLAN
|
||||
|
||||
@@ -3728,3 +3729,29 @@ class MACAddressTestCase(ViewTestCases.PrimaryObjectViewTestCase):
|
||||
cls.bulk_edit_data = {
|
||||
'description': 'New description',
|
||||
}
|
||||
|
||||
@tag('regression') # Issue #20542
|
||||
@override_settings(EXEMPT_VIEW_PERMISSIONS=['*'], EXEMPT_EXCLUDE_MODELS=[])
|
||||
def test_create_macaddress_via_quickadd(self):
|
||||
"""
|
||||
Test creating a MAC address via quick-add modal (e.g., from Interface form).
|
||||
Regression test for issue #20542 where form prefix was missing in POST handler.
|
||||
"""
|
||||
obj_perm = ObjectPermission(name='Test permission', actions=['add'])
|
||||
obj_perm.save()
|
||||
obj_perm.users.add(self.user)
|
||||
obj_perm.object_types.add(ObjectType.objects.get_for_model(self.model))
|
||||
|
||||
# Simulate quick-add form submission with 'quickadd-' prefix
|
||||
formatted_data = post_data(self.form_data)
|
||||
quickadd_data = {f'quickadd-{k}': v for k, v in formatted_data.items()}
|
||||
quickadd_data['_quickadd'] = 'True'
|
||||
|
||||
initial_count = self._get_queryset().count()
|
||||
url = f"{self._get_url('add')}?_quickadd=True&target=id_primary_mac_address"
|
||||
response = self.client.post(url, data=quickadd_data)
|
||||
|
||||
# Should successfully create the MAC address and return the quick_add_created template
|
||||
self.assertHttpStatus(response, 200)
|
||||
self.assertIn(b'quick-add-object', response.content)
|
||||
self.assertEqual(initial_count + 1, self._get_queryset().count())
|
||||
|
||||
@@ -4,6 +4,7 @@ from rest_framework.renderers import JSONRenderer
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.status import HTTP_400_BAD_REQUEST
|
||||
|
||||
from netbox.api.authentication import TokenWritePermission
|
||||
from netbox.api.renderers import TextRenderer
|
||||
from .serializers import ConfigTemplateSerializer
|
||||
|
||||
@@ -64,12 +65,24 @@ class RenderConfigMixin(ConfigTemplateRenderMixin):
|
||||
"""
|
||||
Provides a /render-config/ endpoint for REST API views whose model may have a ConfigTemplate assigned.
|
||||
"""
|
||||
|
||||
def get_permissions(self):
|
||||
# For render_config action, check only token write ability (not model permissions)
|
||||
if self.action == 'render_config':
|
||||
return [TokenWritePermission()]
|
||||
return super().get_permissions()
|
||||
|
||||
@action(detail=True, methods=['post'], url_path='render-config', renderer_classes=[JSONRenderer, TextRenderer])
|
||||
def render_config(self, request, pk):
|
||||
"""
|
||||
Resolve and render the preferred ConfigTemplate for this Device.
|
||||
"""
|
||||
# Override restrict() on the default queryset to enforce the render_config & view actions
|
||||
self.queryset = self.queryset.model.objects.restrict(request.user, 'render_config').restrict(
|
||||
request.user, 'view'
|
||||
)
|
||||
instance = self.get_object()
|
||||
|
||||
object_type = instance._meta.model_name
|
||||
configtemplate = instance.get_config_template()
|
||||
if not configtemplate:
|
||||
|
||||
@@ -1,10 +1,8 @@
|
||||
from django.urls import include, path
|
||||
|
||||
from core.api.views import ObjectTypeViewSet
|
||||
from netbox.api.routers import NetBoxRouter
|
||||
from . import views
|
||||
|
||||
|
||||
router = NetBoxRouter()
|
||||
router.APIRootView = views.ExtrasRootView
|
||||
|
||||
@@ -29,9 +27,6 @@ router.register('config-context-profiles', views.ConfigContextProfileViewSet)
|
||||
router.register('config-templates', views.ConfigTemplateViewSet)
|
||||
router.register('scripts', views.ScriptViewSet, basename='script')
|
||||
|
||||
# TODO: Remove in NetBox v4.5
|
||||
router.register('object-types', ObjectTypeViewSet)
|
||||
|
||||
app_name = 'extras-api'
|
||||
urlpatterns = [
|
||||
path('dashboard/', views.DashboardView.as_view(), name='dashboard'),
|
||||
|
||||
@@ -16,7 +16,7 @@ from rq import Worker
from extras import filtersets
from extras.jobs import ScriptJob
from extras.models import *
from netbox.api.authentication import IsAuthenticatedOrLoginNotRequired
from netbox.api.authentication import IsAuthenticatedOrLoginNotRequired, TokenWritePermission
from netbox.api.features import SyncedDataMixin
from netbox.api.metadata import ContentTypeMetadata
from netbox.api.renderers import TextRenderer
@@ -238,13 +238,22 @@ class ConfigTemplateViewSet(SyncedDataMixin, ConfigTemplateRenderMixin, NetBoxMo
    serializer_class = serializers.ConfigTemplateSerializer
    filterset_class = filtersets.ConfigTemplateFilterSet

    def get_permissions(self):
        # For render action, check only token write ability (not model permissions)
        if self.action == 'render':
            return [TokenWritePermission()]
        return super().get_permissions()

    @action(detail=True, methods=['post'], renderer_classes=[JSONRenderer, TextRenderer])
    def render(self, request, pk):
        """
        Render a ConfigTemplate using the context data provided (if any). If the client requests "text/plain" data,
        return the raw rendered content, rather than serialized JSON.
        """
        # Override restrict() on the default queryset to enforce the render & view actions
        self.queryset = self.queryset.model.objects.restrict(request.user, 'render').restrict(request.user, 'view')
        configtemplate = self.get_object()

        context = request.data

        return self.render_configtemplate(request, configtemplate, context)
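A hedged client-side sketch (host, credentials, and template ID are placeholders): the render action now requires a token that is permitted to write, on top of the 'render' and 'view' object-level permissions enforced by the chained restrict() calls above.

    import requests

    response = requests.post(
        'https://netbox.example.com/api/extras/config-templates/123/render/',
        headers={
            'Authorization': 'Token <key>',   # or the Bearer form used by the new tests
            'Accept': 'text/plain',           # TextRenderer returns the raw rendered output
        },
        json={'hostname': 'router-1'},        # optional context data for the template
    )
    print(response.status_code)               # 403 for a read-only token, 200 otherwise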
@@ -2,6 +2,7 @@ from typing import TYPE_CHECKING, Annotated, List
|
||||
|
||||
import strawberry
|
||||
import strawberry_django
|
||||
from strawberry.types import Info
|
||||
|
||||
__all__ = (
|
||||
'ConfigContextMixin',
|
||||
@@ -37,7 +38,7 @@ class CustomFieldsMixin:
|
||||
class ImageAttachmentsMixin:
|
||||
|
||||
@strawberry_django.field
|
||||
def image_attachments(self, info) -> List[Annotated["ImageAttachmentType", strawberry.lazy('.types')]]:
|
||||
def image_attachments(self, info: Info) -> List[Annotated['ImageAttachmentType', strawberry.lazy('.types')]]:
|
||||
return self.images.restrict(info.context.request.user, 'view')
|
||||
|
||||
|
||||
@@ -45,17 +46,17 @@ class ImageAttachmentsMixin:
|
||||
class JournalEntriesMixin:
|
||||
|
||||
@strawberry_django.field
|
||||
def journal_entries(self, info) -> List[Annotated["JournalEntryType", strawberry.lazy('.types')]]:
|
||||
def journal_entries(self, info: Info) -> List[Annotated['JournalEntryType', strawberry.lazy('.types')]]:
|
||||
return self.journal_entries.all()
|
||||
|
||||
|
||||
@strawberry.type
|
||||
class TagsMixin:
|
||||
|
||||
tags: List[Annotated["TagType", strawberry.lazy('.types')]]
|
||||
tags: List[Annotated['TagType', strawberry.lazy('.types')]]
|
||||
|
||||
|
||||
@strawberry.type
|
||||
class ContactsMixin:
|
||||
|
||||
contacts: List[Annotated["ContactAssignmentType", strawberry.lazy('tenancy.graphql.types')]]
|
||||
contacts: List[Annotated['ContactAssignmentType', strawberry.lazy('tenancy.graphql.types')]]
|
||||
|
||||
@@ -1,9 +1,39 @@
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
from django.contrib.postgres.fields.ranges import RangeField
|
||||
from django.db.models import CharField, JSONField, Lookup
|
||||
from django.db.models.fields.json import KeyTextTransform
|
||||
|
||||
from .fields import CachedValueField
|
||||
|
||||
|
||||
class RangeContains(Lookup):
|
||||
"""
|
||||
Filter ArrayField(RangeField) columns where ANY element-range contains the scalar RHS.
|
||||
|
||||
Usage (ORM):
|
||||
Model.objects.filter(<range_array_field>__range_contains=<scalar>)
|
||||
|
||||
Works with int4range[], int8range[], daterange[], tstzrange[], etc.
|
||||
"""
|
||||
|
||||
lookup_name = 'range_contains'
|
||||
|
||||
def as_sql(self, compiler, connection):
|
||||
# Compile LHS (the array-of-ranges column/expression) and RHS (scalar)
|
||||
lhs, lhs_params = self.process_lhs(compiler, connection)
|
||||
rhs, rhs_params = self.process_rhs(compiler, connection)
|
||||
|
||||
# Guard: only allow ArrayField whose base_field is a PostgreSQL RangeField
|
||||
field = getattr(self.lhs, 'output_field', None)
|
||||
if not (isinstance(field, ArrayField) and isinstance(field.base_field, RangeField)):
|
||||
raise TypeError('range_contains is only valid for ArrayField(RangeField) columns')
|
||||
|
||||
# Range-contains-element using EXISTS + UNNEST keeps the range on the LHS: r @> value
|
||||
sql = f"EXISTS (SELECT 1 FROM unnest({lhs}) AS r WHERE r @> {rhs})"
|
||||
params = lhs_params + rhs_params
|
||||
return sql, params
|
||||
|
||||
|
||||
class Empty(Lookup):
|
||||
"""
|
||||
Filter on whether a string is empty.
|
||||
@@ -25,7 +55,7 @@ class JSONEmpty(Lookup):
|
||||
|
||||
A key is considered empty if it is "", null, or does not exist.
|
||||
"""
|
||||
lookup_name = "empty"
|
||||
lookup_name = 'empty'
|
||||
|
||||
def as_sql(self, compiler, connection):
|
||||
# self.lhs.lhs is the parent expression (could be a JSONField or another KeyTransform)
|
||||
@@ -69,6 +99,7 @@ class NetContainsOrEquals(Lookup):
|
||||
return 'CAST(%s AS INET) >>= %s' % (lhs, rhs), params
|
||||
|
||||
|
||||
ArrayField.register_lookup(RangeContains)
|
||||
CharField.register_lookup(Empty)
|
||||
JSONField.register_lookup(JSONEmpty)
|
||||
CachedValueField.register_lookup(NetHost)
|
||||
|
||||
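Note on the new lookup above: `RangeContains` is registered on `ArrayField`, so any `ArrayField(RangeField)` column can be filtered with `__range_contains=<scalar>`; the EXISTS/unnest form keeps each unnested range on the left of `@>` (range-contains-element). A rough illustration of the ORM usage and the SQL shape it compiles to (parameter rendering is backend-dependent):

from ipam.models import VLANGroup

# Groups whose vid_ranges array has at least one range containing VID 100
matching = VLANGroup.objects.filter(vid_ranges__range_contains=100)

# Approximate SQL produced by RangeContains.as_sql():
#   SELECT ... FROM ipam_vlangroup
#   WHERE EXISTS (SELECT 1 FROM unnest("ipam_vlangroup"."vid_ranges") AS r WHERE r @> 100)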
netbox/extras/migrations/0134_ci_collations.py (new file, 114 lines)
@@ -0,0 +1,114 @@
|
||||
import django.core.validators
|
||||
import re
|
||||
from django.db import migrations, models
|
||||
|
||||
PATTERN_OPS_INDEXES = [
|
||||
'extras_configcontext_name_4bbfe25d_like',
|
||||
'extras_configcontextprofile_name_070de83b_like',
|
||||
'extras_customfield_name_2fe72707_like',
|
||||
'extras_customfieldchoiceset_name_963e63ea_like',
|
||||
'extras_customlink_name_daed2d18_like',
|
||||
'extras_eventrule_name_899453c6_like',
|
||||
'extras_notificationgroup_name_70b0a3f9_like',
|
||||
'extras_savedfilter_name_8a4bbd09_like',
|
||||
'extras_savedfilter_slug_4f93a959_like',
|
||||
'extras_tag_name_9550b3d9_like',
|
||||
'extras_tag_slug_aaa5b7e9_like',
|
||||
'extras_webhook_name_82cf60b5_like',
|
||||
]
|
||||
|
||||
|
||||
def remove_indexes(apps, schema_editor):
|
||||
for idx in PATTERN_OPS_INDEXES:
|
||||
schema_editor.execute(f'DROP INDEX IF EXISTS {idx}')
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
('extras', '0133_make_cf_minmax_decimal'),
|
||||
('dcim', '0217_ci_collations'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(
|
||||
code=remove_indexes,
|
||||
reverse_code=migrations.RunPython.noop,
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='configcontext',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=100, unique=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='configcontextprofile',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=100, unique=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='customfield',
|
||||
name='name',
|
||||
field=models.CharField(
|
||||
db_collation='ci_natural_sort',
|
||||
max_length=50,
|
||||
unique=True,
|
||||
validators=[
|
||||
django.core.validators.RegexValidator(
|
||||
flags=re.RegexFlag['IGNORECASE'],
|
||||
message='Only alphanumeric characters and underscores are allowed.',
|
||||
regex='^[a-z0-9_]+$',
|
||||
),
|
||||
django.core.validators.RegexValidator(
|
||||
flags=re.RegexFlag['IGNORECASE'],
|
||||
inverse_match=True,
|
||||
message='Double underscores are not permitted in custom field names.',
|
||||
regex='__',
|
||||
),
|
||||
],
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='customfieldchoiceset',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=100, unique=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='customlink',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=100, unique=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='eventrule',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=150, unique=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='notificationgroup',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=100, unique=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='savedfilter',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=100, unique=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='savedfilter',
|
||||
name='slug',
|
||||
field=models.SlugField(db_collation='case_insensitive', max_length=100, unique=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='tag',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=100, unique=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='tag',
|
||||
name='slug',
|
||||
field=models.SlugField(allow_unicode=True, db_collation='case_insensitive', max_length=100, unique=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='webhook',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=150, unique=True),
|
||||
),
|
||||
]
|
||||
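The migration above assumes the `ci_natural_sort` and `case_insensitive` collations already exist; it depends on `dcim.0217_ci_collations`, which is not included in this excerpt. As a hedged sketch only: nondeterministic ICU collations of this kind are normally created with Django's `CreateCollation` operation. The locale strings and the dependency below are illustrative assumptions, not taken from this changeset:

from django.contrib.postgres.operations import CreateCollation
from django.db import migrations


class Migration(migrations.Migration):
    dependencies = [
        ('dcim', '0216_previous_migration'),  # placeholder; the real dependency is not shown here
    ]

    operations = [
        # Case-insensitive comparison; nondeterministic, so it also affects equality and uniqueness
        CreateCollation(
            'case_insensitive',
            provider='icu',
            locale='und-u-ks-level2',  # assumed locale
            deterministic=False,
        ),
        # Case-insensitive comparison plus numeric ("natural") ordering of digit runs
        CreateCollation(
            'ci_natural_sort',
            provider='icu',
            locale='und-u-kn-true-ks-level2',  # assumed locale
            deterministic=False,
        ),
    ]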
@@ -35,7 +35,8 @@ class ConfigContextProfile(SyncedDataMixin, PrimaryModel):
|
||||
name = models.CharField(
|
||||
verbose_name=_('name'),
|
||||
max_length=100,
|
||||
unique=True
|
||||
unique=True,
|
||||
db_collation='ci_natural_sort',
|
||||
)
|
||||
description = models.CharField(
|
||||
verbose_name=_('description'),
|
||||
@@ -77,7 +78,8 @@ class ConfigContext(SyncedDataMixin, CloningMixin, CustomLinksMixin, ChangeLogge
|
||||
name = models.CharField(
|
||||
verbose_name=_('name'),
|
||||
max_length=100,
|
||||
unique=True
|
||||
unique=True,
|
||||
db_collation='ci_natural_sort',
|
||||
)
|
||||
profile = models.ForeignKey(
|
||||
to='extras.ConfigContextProfile',
|
||||
|
||||
@@ -94,6 +94,7 @@ class CustomField(CloningMixin, ExportTemplatesMixin, ChangeLoggedModel):
|
||||
verbose_name=_('name'),
|
||||
max_length=50,
|
||||
unique=True,
|
||||
db_collation='ci_natural_sort',
|
||||
help_text=_('Internal field name'),
|
||||
validators=(
|
||||
RegexValidator(
|
||||
@@ -779,7 +780,8 @@ class CustomFieldChoiceSet(CloningMixin, ExportTemplatesMixin, ChangeLoggedModel
|
||||
"""
|
||||
name = models.CharField(
|
||||
max_length=100,
|
||||
unique=True
|
||||
unique=True,
|
||||
db_collation='ci_natural_sort',
|
||||
)
|
||||
description = models.CharField(
|
||||
max_length=200,
|
||||
|
||||
@@ -59,7 +59,8 @@ class EventRule(CustomFieldsMixin, ExportTemplatesMixin, TagsMixin, ChangeLogged
|
||||
name = models.CharField(
|
||||
verbose_name=_('name'),
|
||||
max_length=150,
|
||||
unique=True
|
||||
unique=True,
|
||||
db_collation='ci_natural_sort',
|
||||
)
|
||||
description = models.CharField(
|
||||
verbose_name=_('description'),
|
||||
@@ -164,7 +165,8 @@ class Webhook(CustomFieldsMixin, ExportTemplatesMixin, TagsMixin, ChangeLoggedMo
|
||||
name = models.CharField(
|
||||
verbose_name=_('name'),
|
||||
max_length=150,
|
||||
unique=True
|
||||
unique=True,
|
||||
db_collation='ci_natural_sort',
|
||||
)
|
||||
description = models.CharField(
|
||||
verbose_name=_('description'),
|
||||
@@ -307,7 +309,8 @@ class CustomLink(CloningMixin, ExportTemplatesMixin, ChangeLoggedModel):
|
||||
name = models.CharField(
|
||||
verbose_name=_('name'),
|
||||
max_length=100,
|
||||
unique=True
|
||||
unique=True,
|
||||
db_collation='ci_natural_sort',
|
||||
)
|
||||
enabled = models.BooleanField(
|
||||
verbose_name=_('enabled'),
|
||||
@@ -468,12 +471,14 @@ class SavedFilter(CloningMixin, ExportTemplatesMixin, ChangeLoggedModel):
|
||||
name = models.CharField(
|
||||
verbose_name=_('name'),
|
||||
max_length=100,
|
||||
unique=True
|
||||
unique=True,
|
||||
db_collation='ci_natural_sort',
|
||||
)
|
||||
slug = models.SlugField(
|
||||
verbose_name=_('slug'),
|
||||
max_length=100,
|
||||
unique=True
|
||||
unique=True,
|
||||
db_collation='case_insensitive',
|
||||
)
|
||||
description = models.CharField(
|
||||
verbose_name=_('description'),
|
||||
|
||||
@@ -125,7 +125,8 @@ class NotificationGroup(ChangeLoggedModel):
|
||||
name = models.CharField(
|
||||
verbose_name=_('name'),
|
||||
max_length=100,
|
||||
unique=True
|
||||
unique=True,
|
||||
db_collation='ci_natural_sort',
|
||||
)
|
||||
description = models.CharField(
|
||||
verbose_name=_('description'),
|
||||
|
||||
@@ -2,7 +2,7 @@ from django.conf import settings
|
||||
from django.db import models
|
||||
from django.urls import reverse
|
||||
from django.utils.text import slugify
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from django.utils.translation import gettext_lazy as _, pgettext_lazy
|
||||
from taggit.models import TagBase, GenericTaggedItemBase
|
||||
|
||||
from netbox.choices import ColorChoices
|
||||
@@ -25,6 +25,21 @@ class Tag(CloningMixin, ExportTemplatesMixin, ChangeLoggedModel, TagBase):
|
||||
id = models.BigAutoField(
|
||||
primary_key=True
|
||||
)
|
||||
# Override TagBase.name to set db_collation
|
||||
name = models.CharField(
|
||||
verbose_name=pgettext_lazy("A tag name", "name"),
|
||||
unique=True,
|
||||
max_length=100,
|
||||
db_collation='ci_natural_sort',
|
||||
)
|
||||
# Override TagBase.slug to set db_collation
|
||||
slug = models.SlugField(
|
||||
verbose_name=pgettext_lazy("A tag slug", "slug"),
|
||||
unique=True,
|
||||
max_length=100,
|
||||
allow_unicode=True,
|
||||
db_collation='case_insensitive',
|
||||
)
|
||||
color = ColorField(
|
||||
verbose_name=_('color'),
|
||||
default=ColorChoices.COLOR_GREY
|
||||
|
||||
@@ -90,7 +90,7 @@ class ConfigContextModelQuerySet(RestrictedQuerySet):
|
||||
ConfigContext.objects.filter(
|
||||
self._get_config_context_filters()
|
||||
).annotate(
|
||||
_data=EmptyGroupByJSONBAgg('data', ordering=['weight', 'name'])
|
||||
_data=EmptyGroupByJSONBAgg('data', order_by=['weight', 'name'])
|
||||
).values("_data").order_by()
|
||||
)
|
||||
)
|
||||
|
||||
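The one-line change above tracks the rename of the `ordering` argument to `order_by` on PostgreSQL aggregate functions in newer Django releases (`ordering` is deprecated in favor of `order_by`). For illustration with the stock aggregate:

from django.contrib.postgres.aggregates import JSONBAgg

# Previously: JSONBAgg('data', ordering=['weight', 'name'])
aggregate = JSONBAgg('data', order_by=['weight', 'name'])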
@@ -1,12 +1,9 @@
|
||||
import inspect
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
|
||||
import yaml
|
||||
from django import forms
|
||||
from django.conf import settings
|
||||
from django.core.files.storage import storages
|
||||
from django.core.validators import RegexValidator
|
||||
from django.utils import timezone
|
||||
@@ -329,6 +326,9 @@ class BaseScript:
|
||||
# Declare the placeholder for the current request
|
||||
self.request = None
|
||||
|
||||
# Initiate the storage backend (local, S3, etc) as a class attr
|
||||
self.storage = storages.create_storage(storages.backends["scripts"])
|
||||
|
||||
# Compile test methods and initialize results skeleton
|
||||
for method in dir(self):
|
||||
if method.startswith('test_') and callable(getattr(self, method)):
|
||||
@@ -394,8 +394,7 @@ class BaseScript:
|
||||
return inspect.getfile(self.__class__)
|
||||
|
||||
def findsource(self, object):
|
||||
storage = storages.create_storage(storages.backends["scripts"])
|
||||
with storage.open(os.path.basename(self.filename), 'r') as f:
|
||||
with self.storage.open(os.path.basename(self.filename), 'r') as f:
|
||||
data = f.read()
|
||||
|
||||
# Break the source code into lines
|
||||
@@ -488,7 +487,7 @@ class BaseScript:
|
||||
if self.fieldsets:
|
||||
fieldsets.extend(self.fieldsets)
|
||||
else:
|
||||
fields = list(name for name, _ in self._get_vars().items())
|
||||
fields = list(name for name, __ in self._get_vars().items())
|
||||
fieldsets.append((_('Script Data'), fields))
|
||||
|
||||
# Append the default fieldset if defined in the Meta class
|
||||
@@ -580,40 +579,6 @@ class BaseScript:
|
||||
self._log(message, obj, level=LogLevelChoices.LOG_FAILURE)
|
||||
self.failed = True
|
||||
|
||||
#
|
||||
# Convenience functions
|
||||
#
|
||||
|
||||
def load_yaml(self, filename):
|
||||
"""
|
||||
Return data from a YAML file
|
||||
"""
|
||||
# TODO: DEPRECATED: Remove this method in v4.5
|
||||
self._log(
|
||||
_("load_yaml is deprecated and will be removed in v4.5"),
|
||||
level=LogLevelChoices.LOG_WARNING
|
||||
)
|
||||
file_path = os.path.join(settings.SCRIPTS_ROOT, filename)
|
||||
with open(file_path, 'r') as datafile:
|
||||
data = yaml.load(datafile, Loader=yaml.SafeLoader)
|
||||
|
||||
return data
|
||||
|
||||
def load_json(self, filename):
|
||||
"""
|
||||
Return data from a JSON file
|
||||
"""
|
||||
# TODO: DEPRECATED: Remove this method in v4.5
|
||||
self._log(
|
||||
_("load_json is deprecated and will be removed in v4.5"),
|
||||
level=LogLevelChoices.LOG_WARNING
|
||||
)
|
||||
file_path = os.path.join(settings.SCRIPTS_ROOT, filename)
|
||||
with open(file_path, 'r') as datafile:
|
||||
data = json.load(datafile)
|
||||
|
||||
return data
|
||||
|
||||
#
|
||||
# Legacy Report functionality
|
||||
#
|
||||
|
||||
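The hunk above removes the deprecated `load_yaml()` and `load_json()` convenience methods (their own warnings pointed to removal in v4.5). Scripts that still need to read local data files can call the standard libraries directly; a minimal sketch that mirrors what the removed helpers did, resolving paths against `SCRIPTS_ROOT`:

import json
import os

import yaml
from django.conf import settings


def read_yaml(filename):
    # Equivalent of the removed load_yaml(): resolve against SCRIPTS_ROOT and safe-load
    with open(os.path.join(settings.SCRIPTS_ROOT, filename)) as fh:
        return yaml.safe_load(fh)


def read_json(filename):
    # Equivalent of the removed load_json()
    with open(os.path.join(settings.SCRIPTS_ROOT, filename)) as fh:
        return json.load(fh)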
@@ -3,6 +3,7 @@ import datetime
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.urls import reverse
|
||||
from django.utils.timezone import make_aware, now
|
||||
from rest_framework import status
|
||||
|
||||
from core.choices import ManagedFileRootPathChoices
|
||||
from core.events import *
|
||||
@@ -11,7 +12,8 @@ from dcim.models import Device, DeviceRole, DeviceType, Manufacturer, Rack, Loca
|
||||
from extras.choices import *
|
||||
from extras.models import *
|
||||
from extras.scripts import BooleanVar, IntegerVar, Script as PythonClass, StringVar
|
||||
from users.models import Group, User
|
||||
from users.constants import TOKEN_PREFIX
|
||||
from users.models import Group, Token, User
|
||||
from utilities.testing import APITestCase, APIViewTestCases
|
||||
|
||||
|
||||
@@ -854,6 +856,47 @@ class ConfigTemplateTest(APIViewTestCases.APIViewTestCase):
|
||||
)
|
||||
ConfigTemplate.objects.bulk_create(config_templates)
|
||||
|
||||
def test_render(self):
|
||||
configtemplate = ConfigTemplate.objects.first()
|
||||
|
||||
self.add_permissions('extras.render_configtemplate', 'extras.view_configtemplate')
|
||||
url = reverse('extras-api:configtemplate-render', kwargs={'pk': configtemplate.pk})
|
||||
response = self.client.post(url, {'foo': 'bar'}, format='json', **self.header)
|
||||
self.assertHttpStatus(response, status.HTTP_200_OK)
|
||||
self.assertEqual(response.data['content'], 'Foo: bar')
|
||||
|
||||
def test_render_without_permission(self):
|
||||
configtemplate = ConfigTemplate.objects.first()
|
||||
|
||||
# No permissions added - user has no render permission
|
||||
url = reverse('extras-api:configtemplate-render', kwargs={'pk': configtemplate.pk})
|
||||
response = self.client.post(url, {'foo': 'bar'}, format='json', **self.header)
|
||||
self.assertHttpStatus(response, status.HTTP_404_NOT_FOUND)
|
||||
|
||||
def test_render_token_write_enabled(self):
|
||||
configtemplate = ConfigTemplate.objects.first()
|
||||
|
||||
self.add_permissions('extras.render_configtemplate', 'extras.view_configtemplate')
|
||||
url = reverse('extras-api:configtemplate-render', kwargs={'pk': configtemplate.pk})
|
||||
|
||||
# Request without token auth should fail with PermissionDenied
|
||||
response = self.client.post(url, {'foo': 'bar'}, format='json')
|
||||
self.assertHttpStatus(response, status.HTTP_403_FORBIDDEN)
|
||||
|
||||
# Create token with write_enabled=False
|
||||
token = Token.objects.create(version=2, user=self.user, write_enabled=False)
|
||||
token_header = f'Bearer {TOKEN_PREFIX}{token.key}.{token.token}'
|
||||
|
||||
# Request with write-disabled token should fail
|
||||
response = self.client.post(url, {'foo': 'bar'}, format='json', HTTP_AUTHORIZATION=token_header)
|
||||
self.assertHttpStatus(response, status.HTTP_403_FORBIDDEN)
|
||||
|
||||
# Enable write and retry
|
||||
token.write_enabled = True
|
||||
token.save()
|
||||
response = self.client.post(url, {'foo': 'bar'}, format='json', HTTP_AUTHORIZATION=token_header)
|
||||
self.assertHttpStatus(response, status.HTTP_200_OK)
|
||||
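The test above covers both failure modes for the render endpoint: no token at all (403) and a v2 token with `write_enabled=False` (403). For reference, the two Authorization header shapes accepted after this change are `Token <token>` (v1) and `Bearer <key>.<token>` (v2, where the value after `Bearer` begins with `TOKEN_PREFIX`). A hedged client-side sketch with placeholder credentials and an assumed hostname:

import requests

url = 'https://netbox.example.com/api/extras/config-templates/1/render/'  # hostname and PK assumed

# v1 (legacy) token
requests.post(url, json={'foo': 'bar'}, headers={'Authorization': 'Token <token>'})

# v2 token: value begins with TOKEN_PREFIX, then "<key>.<secret>"
requests.post(url, json={'foo': 'bar'}, headers={'Authorization': 'Bearer <prefix><key>.<secret>'})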
|
||||
|
||||
class ScriptTest(APITestCase):
|
||||
|
||||
|
||||
@@ -363,7 +363,7 @@ class EventRuleTest(APITestCase):
|
||||
body = json.loads(request.body)
|
||||
self.assertEqual(body['event'], 'created')
|
||||
self.assertEqual(body['timestamp'], job.kwargs['timestamp'])
|
||||
self.assertEqual(body['model'], 'site')
|
||||
self.assertEqual(body['object_type'], 'dcim.site')
|
||||
self.assertEqual(body['username'], 'testuser')
|
||||
self.assertEqual(body['request_id'], str(request_id))
|
||||
self.assertEqual(body['data']['name'], 'Site 1')
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
import logging
|
||||
import tempfile
|
||||
from datetime import date, datetime, timezone
|
||||
from decimal import Decimal
|
||||
|
||||
@@ -9,7 +7,6 @@ from netaddr import IPAddress, IPNetwork
|
||||
|
||||
from dcim.models import DeviceRole
|
||||
from extras.scripts import *
|
||||
from utilities.testing import disable_logging
|
||||
|
||||
CHOICES = (
|
||||
('ff0000', 'Red'),
|
||||
@@ -35,35 +32,6 @@ JSON_DATA = """
|
||||
"""
|
||||
|
||||
|
||||
class ScriptTest(TestCase):
|
||||
|
||||
def test_load_yaml(self):
|
||||
datafile = tempfile.NamedTemporaryFile()
|
||||
datafile.write(bytes(YAML_DATA, 'UTF-8'))
|
||||
datafile.seek(0)
|
||||
|
||||
with disable_logging(level=logging.WARNING):
|
||||
data = Script().load_yaml(datafile.name)
|
||||
self.assertEqual(data, {
|
||||
'Foo': 123,
|
||||
'Bar': 456,
|
||||
'Baz': ['A', 'B', 'C'],
|
||||
})
|
||||
|
||||
def test_load_json(self):
|
||||
datafile = tempfile.NamedTemporaryFile()
|
||||
datafile.write(bytes(JSON_DATA, 'UTF-8'))
|
||||
datafile.seek(0)
|
||||
|
||||
with disable_logging(level=logging.WARNING):
|
||||
data = Script().load_json(datafile.name)
|
||||
self.assertEqual(data, {
|
||||
'Foo': 123,
|
||||
'Bar': 456,
|
||||
'Baz': ['A', 'B', 'C'],
|
||||
})
|
||||
|
||||
|
||||
class ScriptVariablesTest(TestCase):
|
||||
|
||||
def test_stringvar(self):
|
||||
|
||||
@@ -52,7 +52,6 @@ def send_webhook(event_rule, object_type, event_type, data, timestamp, username,
|
||||
'event': WEBHOOK_EVENT_TYPES.get(event_type, event_type),
|
||||
'timestamp': timestamp,
|
||||
'object_type': '.'.join(object_type.natural_key()),
|
||||
'model': object_type.model,
|
||||
'username': username,
|
||||
'request_id': request.id if request else None,
|
||||
'data': data,
|
||||
@@ -100,7 +99,7 @@ def send_webhook(event_rule, object_type, event_type, data, timestamp, username,
|
||||
'data': body.encode('utf8'),
|
||||
}
|
||||
logger.info(
|
||||
f"Sending {params['method']} request to {params['url']} ({context['model']} {context['event']})"
|
||||
f"Sending {params['method']} request to {params['url']} ({context['object_type']} {context['event']})"
|
||||
)
|
||||
logger.debug(params)
|
||||
try:
|
||||
|
||||
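The hunk above drops the redundant `model` key from the webhook request context (`object_type` already carries the dotted `app_label.model` form) and updates the log line accordingly. Based on the values asserted in the adjacent test, a delivered payload context now looks roughly like this, shown as a Python dict with placeholder timestamp and request ID:

context = {
    'event': 'created',
    'timestamp': '2025-01-01T00:00:00Z',                    # placeholder
    'object_type': 'dcim.site',                             # 'model': 'site' is no longer included
    'username': 'testuser',
    'request_id': '00000000-0000-0000-0000-000000000000',   # placeholder
    'data': {'name': 'Site 1'},                             # serialized object data (truncated)
}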
@@ -595,6 +595,7 @@ class IPAddressFilterSet(NetBoxModelFilterSet, TenancyFilterSet, ContactModelFil
|
||||
to_field_name='rd',
|
||||
label=_('VRF (RD)'),
|
||||
)
|
||||
assigned_object_type = ContentTypeFilter()
|
||||
device = MultiValueCharFilter(
|
||||
method='filter_device',
|
||||
field_name='name',
|
||||
@@ -908,7 +909,8 @@ class VLANGroupFilterSet(OrganizationalModelFilterSet, TenancyFilterSet):
|
||||
method='filter_scope'
|
||||
)
|
||||
contains_vid = django_filters.NumberFilter(
|
||||
method='filter_contains_vid'
|
||||
field_name='vid_ranges',
|
||||
lookup_expr='range_contains',
|
||||
)
|
||||
|
||||
class Meta:
|
||||
@@ -931,21 +933,6 @@ class VLANGroupFilterSet(OrganizationalModelFilterSet, TenancyFilterSet):
|
||||
scope_id=value
|
||||
)
|
||||
|
||||
def filter_contains_vid(self, queryset, name, value):
|
||||
"""
|
||||
Return all VLANGroups which contain the given VLAN ID.
|
||||
"""
|
||||
table_name = VLANGroup._meta.db_table
|
||||
# TODO: See if this can be optimized without compromising queryset integrity
|
||||
# Expand VLAN ID ranges to query by integer
|
||||
groups = VLANGroup.objects.raw(
|
||||
f'SELECT id FROM {table_name}, unnest(vid_ranges) vid_range WHERE %s <@ vid_range',
|
||||
params=(value,)
|
||||
)
|
||||
return queryset.filter(
|
||||
pk__in=[g.id for g in groups]
|
||||
)
|
||||
|
||||
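With the change above, `contains_vid` becomes a plain declarative filter over `vid_ranges` using the new `range_contains` lookup, replacing the raw `unnest(...)`/`<@` query and the `filter_contains_vid()` method it required. The query parameter itself is unchanged; a hedged client sketch with an assumed hostname and placeholder credential:

import requests

# All VLAN groups whose configured VID ranges contain VLAN ID 100
response = requests.get(
    'https://netbox.example.com/api/ipam/vlan-groups/',  # hostname is an assumption
    params={'contains_vid': 100},
    headers={'Authorization': 'Token <token>'},           # placeholder credential
)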
|
||||
class VLANFilterSet(NetBoxModelFilterSet, TenancyFilterSet):
|
||||
region_id = TreeNodeMultipleChoiceFilter(
|
||||
@@ -1166,6 +1153,7 @@ class ServiceTemplateFilterSet(NetBoxModelFilterSet):
|
||||
|
||||
|
||||
class ServiceFilterSet(ContactModelFilterSet, NetBoxModelFilterSet):
|
||||
parent_object_type = ContentTypeFilter()
|
||||
device = MultiValueCharFilter(
|
||||
method='filter_device',
|
||||
field_name='name',
|
||||
|
||||
@@ -19,7 +19,7 @@ from tenancy.graphql.filter_mixins import ContactFilterMixin, TenancyFilterMixin
|
||||
from virtualization.models import VMInterface
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from netbox.graphql.filter_lookups import IntegerArrayLookup, IntegerLookup
|
||||
from netbox.graphql.filter_lookups import IntegerLookup, IntegerRangeArrayLookup
|
||||
from circuits.graphql.filters import ProviderFilter
|
||||
from core.graphql.filters import ContentTypeFilter
|
||||
from dcim.graphql.filters import SiteFilter
|
||||
@@ -340,7 +340,7 @@ class VLANFilter(TenancyFilterMixin, PrimaryModelFilterMixin):
|
||||
|
||||
@strawberry_django.filter_type(models.VLANGroup, lookups=True)
|
||||
class VLANGroupFilter(ScopedFilterMixin, OrganizationalModelFilterMixin):
|
||||
vid_ranges: Annotated['IntegerArrayLookup', strawberry.lazy('netbox.graphql.filter_lookups')] | None = (
|
||||
vid_ranges: Annotated['IntegerRangeArrayLookup', strawberry.lazy('netbox.graphql.filter_lookups')] | None = (
|
||||
strawberry_django.filter_field()
|
||||
)
|
||||
|
||||
|
||||
@@ -74,7 +74,7 @@ class BaseIPAddressFamilyType:
|
||||
filters=ASNFilter,
|
||||
pagination=True
|
||||
)
|
||||
class ASNType(NetBoxObjectType):
|
||||
class ASNType(NetBoxObjectType, ContactsMixin):
|
||||
asn: BigInt
|
||||
rir: Annotated["RIRType", strawberry.lazy('ipam.graphql.types')] | None
|
||||
tenant: Annotated["TenantType", strawberry.lazy('tenancy.graphql.types')] | None
|
||||
|
||||
netbox/ipam/migrations/0083_ci_collations.py (new file, 100 lines)
@@ -0,0 +1,100 @@
|
||||
from django.db import migrations, models
|
||||
|
||||
PATTERN_OPS_INDEXES = [
|
||||
'ipam_asnrange_name_c7585e73_like',
|
||||
'ipam_asnrange_slug_c8a7d8a1_like',
|
||||
'ipam_rir_name_64a71982_like',
|
||||
'ipam_rir_slug_ff1a369a_like',
|
||||
'ipam_role_name_13784849_like',
|
||||
'ipam_role_slug_309ca14c_like',
|
||||
'ipam_routetarget_name_212be79f_like',
|
||||
'ipam_servicetemplate_name_1a2f3410_like',
|
||||
'ipam_vlangroup_slug_40abcf6b_like',
|
||||
'ipam_vlantranslationpolicy_name_17e0a007_like',
|
||||
'ipam_vrf_rd_0ac1bde1_like',
|
||||
]
|
||||
|
||||
|
||||
def remove_indexes(apps, schema_editor):
|
||||
for idx in PATTERN_OPS_INDEXES:
|
||||
schema_editor.execute(f'DROP INDEX IF EXISTS {idx}')
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('ipam', '0082_add_prefix_network_containment_indexes'),
|
||||
('dcim', '0217_ci_collations'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(
|
||||
code=remove_indexes,
|
||||
reverse_code=migrations.RunPython.noop,
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='asnrange',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=100, unique=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='asnrange',
|
||||
name='slug',
|
||||
field=models.SlugField(db_collation='case_insensitive', max_length=100, unique=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='rir',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=100, unique=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='rir',
|
||||
name='slug',
|
||||
field=models.SlugField(db_collation='case_insensitive', max_length=100, unique=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='role',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=100, unique=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='role',
|
||||
name='slug',
|
||||
field=models.SlugField(db_collation='case_insensitive', max_length=100, unique=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='routetarget',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=21, unique=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='servicetemplate',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=100, unique=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='vlan',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=64),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='vlangroup',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=100),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='vlangroup',
|
||||
name='slug',
|
||||
field=models.SlugField(db_collation='case_insensitive', max_length=100),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='vlantranslationpolicy',
|
||||
name='name',
|
||||
field=models.CharField(db_collation='ci_natural_sort', max_length=100, unique=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='vrf',
|
||||
name='rd',
|
||||
field=models.CharField(blank=True, db_collation='case_insensitive', max_length=21, null=True, unique=True),
|
||||
),
|
||||
]
|
||||
@@ -18,12 +18,7 @@ class ASNRange(OrganizationalModel):
|
||||
verbose_name=_('name'),
|
||||
max_length=100,
|
||||
unique=True,
|
||||
db_collation="natural_sort"
|
||||
)
|
||||
slug = models.SlugField(
|
||||
verbose_name=_('slug'),
|
||||
max_length=100,
|
||||
unique=True
|
||||
db_collation='ci_natural_sort',
|
||||
)
|
||||
rir = models.ForeignKey(
|
||||
to='ipam.RIR',
|
||||
|
||||
@@ -50,7 +50,8 @@ class ServiceTemplate(ServiceBase, PrimaryModel):
|
||||
name = models.CharField(
|
||||
verbose_name=_('name'),
|
||||
max_length=100,
|
||||
unique=True
|
||||
unique=True,
|
||||
db_collation='ci_natural_sort',
|
||||
)
|
||||
|
||||
class Meta:
|
||||
|
||||
@@ -10,9 +10,9 @@ from django.utils.translation import gettext_lazy as _
|
||||
from dcim.models import Interface, Site, SiteGroup
|
||||
from ipam.choices import *
|
||||
from ipam.constants import *
|
||||
from ipam.querysets import VLANQuerySet, VLANGroupQuerySet
|
||||
from ipam.querysets import VLANGroupQuerySet, VLANQuerySet
|
||||
from netbox.models import OrganizationalModel, PrimaryModel, NetBoxModel
|
||||
from utilities.data import check_ranges_overlap, ranges_to_string
|
||||
from utilities.data import check_ranges_overlap, ranges_to_string, ranges_to_string_list
|
||||
from virtualization.models import VMInterface
|
||||
|
||||
__all__ = (
|
||||
@@ -37,11 +37,12 @@ class VLANGroup(OrganizationalModel):
|
||||
name = models.CharField(
|
||||
verbose_name=_('name'),
|
||||
max_length=100,
|
||||
db_collation="natural_sort"
|
||||
db_collation='ci_natural_sort',
|
||||
)
|
||||
slug = models.SlugField(
|
||||
verbose_name=_('slug'),
|
||||
max_length=100
|
||||
max_length=100,
|
||||
db_collation='case_insensitive',
|
||||
)
|
||||
scope_type = models.ForeignKey(
|
||||
to='contenttypes.ContentType',
|
||||
@@ -164,8 +165,18 @@ class VLANGroup(OrganizationalModel):
|
||||
"""
|
||||
return VLAN.objects.filter(group=self).order_by('vid')
|
||||
|
||||
@property
|
||||
def vid_ranges_items(self):
|
||||
"""
|
||||
Property that converts VID ranges to a list of string representations.
|
||||
"""
|
||||
return ranges_to_string_list(self.vid_ranges)
|
||||
|
||||
@property
|
||||
def vid_ranges_list(self):
|
||||
"""
|
||||
Property that converts VID ranges into a string representation.
|
||||
"""
|
||||
return ranges_to_string(self.vid_ranges)
|
||||
|
||||
|
||||
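The two properties added above expose the VID ranges in display-friendly forms: `vid_ranges_list` as a single joined string (via `ranges_to_string()`) and the new `vid_ranges_items` as a list of per-range strings (via `ranges_to_string_list()`), which is what the new `ArrayColumn` in the VLANGroup table consumes. A hedged illustration of the expected shapes for a group covering VIDs 1-10 and 20-30 (exact separators depend on the helper functions):

from ipam.models import VLANGroup

group = VLANGroup.objects.get(name='VlanGroup-A')  # e.g. the group created in the lookup tests below

print(group.vid_ranges_list)    # single string, e.g. '1-10,20-30'
print(group.vid_ranges_items)   # list of strings, e.g. ['1-10', '20-30'], one entry per range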
@@ -204,7 +215,8 @@ class VLAN(PrimaryModel):
|
||||
)
|
||||
name = models.CharField(
|
||||
verbose_name=_('name'),
|
||||
max_length=64
|
||||
max_length=64,
|
||||
db_collation='ci_natural_sort',
|
||||
)
|
||||
tenant = models.ForeignKey(
|
||||
to='tenancy.Tenant',
|
||||
@@ -352,6 +364,7 @@ class VLANTranslationPolicy(PrimaryModel):
|
||||
verbose_name=_('name'),
|
||||
max_length=100,
|
||||
unique=True,
|
||||
db_collation='ci_natural_sort',
|
||||
)
|
||||
|
||||
class Meta:
|
||||
|
||||
@@ -19,11 +19,12 @@ class VRF(PrimaryModel):
|
||||
name = models.CharField(
|
||||
verbose_name=_('name'),
|
||||
max_length=100,
|
||||
db_collation="natural_sort"
|
||||
db_collation='natural_sort',
|
||||
)
|
||||
rd = models.CharField(
|
||||
max_length=VRF_RD_MAX_LENGTH,
|
||||
unique=True,
|
||||
db_collation='case_insensitive',
|
||||
blank=True,
|
||||
null=True,
|
||||
verbose_name=_('route distinguisher'),
|
||||
@@ -75,8 +76,8 @@ class RouteTarget(PrimaryModel):
|
||||
verbose_name=_('name'),
|
||||
max_length=VRF_RD_MAX_LENGTH, # Same format options as VRF RD (RFC 4360 section 4)
|
||||
unique=True,
|
||||
db_collation='ci_natural_sort',
|
||||
help_text=_('Route target value (formatted in accordance with RFC 4360)'),
|
||||
db_collation="natural_sort"
|
||||
)
|
||||
tenant = models.ForeignKey(
|
||||
to='tenancy.Tenant',
|
||||
|
||||
@@ -41,7 +41,8 @@ class VLANGroupTable(TenancyColumnsMixin, NetBoxTable):
|
||||
linkify=True,
|
||||
orderable=False
|
||||
)
|
||||
vid_ranges_list = tables.Column(
|
||||
vid_ranges_list = columns.ArrayColumn(
|
||||
accessor='vid_ranges_items',
|
||||
verbose_name=_('VID Ranges'),
|
||||
orderable=False
|
||||
)
|
||||
|
||||
@@ -1723,6 +1723,10 @@ class VLANGroupTestCase(TestCase, ChangeLoggedFilterSetTests):
|
||||
self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2)
|
||||
params = {'contains_vid': 1}
|
||||
self.assertEqual(self.filterset(params, self.queryset).qs.count(), 8)
|
||||
params = {'contains_vid': 12} # 11 is NOT in [1,11)
|
||||
self.assertEqual(self.filterset(params, self.queryset).qs.count(), 1)
|
||||
params = {'contains_vid': 4095}
|
||||
self.assertEqual(self.filterset(params, self.queryset).qs.count(), 0)
|
||||
|
||||
def test_region(self):
|
||||
params = {'region': Region.objects.first().pk}
|
||||
|
||||
netbox/ipam/tests/test_lookups.py (new file, 66 lines)
@@ -0,0 +1,66 @@
|
||||
from django.test import TestCase
|
||||
from django.db.backends.postgresql.psycopg_any import NumericRange
|
||||
from ipam.models import VLANGroup
|
||||
|
||||
|
||||
class VLANGroupRangeContainsLookupTests(TestCase):
|
||||
@classmethod
|
||||
def setUpTestData(cls):
|
||||
# Two ranges: [1,11) and [20,31)
|
||||
cls.g1 = VLANGroup.objects.create(
|
||||
name='VlanGroup-A',
|
||||
slug='VlanGroup-A',
|
||||
vid_ranges=[NumericRange(1, 11), NumericRange(20, 31)],
|
||||
)
|
||||
# One range: [100,201)
|
||||
cls.g2 = VLANGroup.objects.create(
|
||||
name='VlanGroup-B',
|
||||
slug='VlanGroup-B',
|
||||
vid_ranges=[NumericRange(100, 201)],
|
||||
)
|
||||
cls.g_empty = VLANGroup.objects.create(
|
||||
name='VlanGroup-empty',
|
||||
slug='VlanGroup-empty',
|
||||
vid_ranges=[],
|
||||
)
|
||||
|
||||
def test_contains_value_in_first_range(self):
|
||||
"""
|
||||
Tests whether a specific value is contained within the first range in a queried
|
||||
set of VLANGroup objects.
|
||||
"""
|
||||
names = list(
|
||||
VLANGroup.objects.filter(vid_ranges__range_contains=10).values_list('name', flat=True).order_by('name')
|
||||
)
|
||||
self.assertEqual(names, ['VlanGroup-A'])
|
||||
|
||||
def test_contains_value_in_second_range(self):
|
||||
"""
|
||||
Tests if a value exists in the second range of VLANGroup objects and
|
||||
validates the result against the expected list of names.
|
||||
"""
|
||||
names = list(
|
||||
VLANGroup.objects.filter(vid_ranges__range_contains=25).values_list('name', flat=True).order_by('name')
|
||||
)
|
||||
self.assertEqual(names, ['VlanGroup-A'])
|
||||
|
||||
def test_upper_bound_is_exclusive(self):
|
||||
"""
|
||||
Tests if the upper bound of the range is exclusive in the filter method.
|
||||
"""
|
||||
# 11 is NOT in [1,11)
|
||||
self.assertFalse(VLANGroup.objects.filter(vid_ranges__range_contains=11).exists())
|
||||
|
||||
def test_no_match_far_outside(self):
|
||||
"""
|
||||
Tests that no VLANGroup contains a VID within a specified range far outside
|
||||
common VID bounds and returns `False`.
|
||||
"""
|
||||
self.assertFalse(VLANGroup.objects.filter(vid_ranges__range_contains=4095).exists())
|
||||
|
||||
def test_empty_array_never_matches(self):
|
||||
"""
|
||||
Tests the behavior of VLANGroup objects when an empty array is used to match a
|
||||
specific condition.
|
||||
"""
|
||||
self.assertFalse(VLANGroup.objects.filter(pk=self.g_empty.pk, vid_ranges__range_contains=1).exists())
|
||||
@@ -2,47 +2,90 @@ import logging
|
||||
|
||||
from django.conf import settings
|
||||
from django.utils import timezone
|
||||
from rest_framework import authentication, exceptions
|
||||
from drf_spectacular.extensions import OpenApiAuthenticationExtension
|
||||
from rest_framework import exceptions
|
||||
from rest_framework.authentication import BaseAuthentication, get_authorization_header
|
||||
from rest_framework.permissions import BasePermission, DjangoObjectPermissions, SAFE_METHODS
|
||||
|
||||
from netbox.config import get_config
|
||||
from users.constants import TOKEN_PREFIX
|
||||
from users.models import Token
|
||||
from utilities.request import get_client_ip
|
||||
|
||||
V1_KEYWORD = 'Token'
|
||||
V2_KEYWORD = 'Bearer'
|
||||
|
||||
class TokenAuthentication(authentication.TokenAuthentication):
|
||||
|
||||
class TokenAuthentication(BaseAuthentication):
|
||||
"""
|
||||
A custom authentication scheme which enforces Token expiration times and source IP restrictions.
|
||||
"""
|
||||
model = Token
|
||||
|
||||
def authenticate(self, request):
|
||||
result = super().authenticate(request)
|
||||
|
||||
if result:
|
||||
token = result[1]
|
||||
|
||||
# Enforce source IP restrictions (if any) set on the token
|
||||
if token.allowed_ips:
|
||||
client_ip = get_client_ip(request)
|
||||
if client_ip is None:
|
||||
raise exceptions.AuthenticationFailed(
|
||||
"Client IP address could not be determined for validation. Check that the HTTP server is "
|
||||
"correctly configured to pass the required header(s)."
|
||||
)
|
||||
if not token.validate_client_ip(client_ip):
|
||||
raise exceptions.AuthenticationFailed(
|
||||
f"Source IP {client_ip} is not permitted to authenticate using this token."
|
||||
)
|
||||
|
||||
return result
|
||||
|
||||
def authenticate_credentials(self, key):
|
||||
model = self.get_model()
|
||||
# Authorization header is not present; ignore
|
||||
if not (auth := get_authorization_header(request).split()):
|
||||
return
|
||||
# Unrecognized header; ignore
|
||||
if auth[0].lower() not in (V1_KEYWORD.lower().encode(), V2_KEYWORD.lower().encode()):
|
||||
return
|
||||
# Check for extraneous token content
|
||||
if len(auth) != 2:
|
||||
raise exceptions.AuthenticationFailed(
|
||||
'Invalid authorization header: Must be in the form "Bearer <key>.<token>" or "Token <token>"'
|
||||
)
|
||||
# Extract the key (if v2) & token plaintext from the auth header
|
||||
try:
|
||||
token = model.objects.prefetch_related('user').get(key=key)
|
||||
except model.DoesNotExist:
|
||||
raise exceptions.AuthenticationFailed("Invalid token")
|
||||
auth_value = auth[1].decode()
|
||||
except UnicodeError:
|
||||
raise exceptions.AuthenticationFailed("Invalid authorization header: Token contains invalid characters")
|
||||
|
||||
# Infer token version from presence or absence of prefix
|
||||
version = 2 if auth_value.startswith(TOKEN_PREFIX) else 1
|
||||
|
||||
if version == 1:
|
||||
key, plaintext = None, auth_value
|
||||
else:
|
||||
auth_value = auth_value.removeprefix(TOKEN_PREFIX)
|
||||
try:
|
||||
key, plaintext = auth_value.split('.', 1)
|
||||
except ValueError:
|
||||
raise exceptions.AuthenticationFailed(
|
||||
"Invalid authorization header: Could not parse key from v2 token. Did you mean to use 'Token' "
|
||||
"instead of 'Bearer'?"
|
||||
)
|
||||
|
||||
# Look for a matching token in the database
|
||||
try:
|
||||
qs = Token.objects.prefetch_related('user')
|
||||
if version == 1:
|
||||
# Fetch v1 token by querying plaintext value directly
|
||||
token = qs.get(version=version, plaintext=plaintext)
|
||||
else:
|
||||
# Fetch v2 token by key, then validate the plaintext
|
||||
token = qs.get(version=version, key=key)
|
||||
if not token.validate(plaintext):
|
||||
# Key is valid but plaintext is not. Raise DoesNotExist to guard against key enumeration.
|
||||
raise Token.DoesNotExist()
|
||||
except Token.DoesNotExist:
|
||||
raise exceptions.AuthenticationFailed(f"Invalid v{version} token")
|
||||
|
||||
# Enforce source IP restrictions (if any) set on the token
|
||||
if token.allowed_ips:
|
||||
client_ip = get_client_ip(request)
|
||||
if client_ip is None:
|
||||
raise exceptions.AuthenticationFailed(
|
||||
"Client IP address could not be determined for validation. Check that the HTTP server is "
|
||||
"correctly configured to pass the required header(s)."
|
||||
)
|
||||
if not token.validate_client_ip(client_ip):
|
||||
raise exceptions.AuthenticationFailed(
|
||||
f"Source IP {client_ip} is not permitted to authenticate using this token."
|
||||
)
|
||||
|
||||
# Enforce the Token's expiration time, if one has been set.
|
||||
if token.is_expired:
|
||||
raise exceptions.AuthenticationFailed("Token expired")
|
||||
|
||||
# Update last used, but only once per minute at most. This reduces write load on the database
|
||||
if not token.last_used or (timezone.now() - token.last_used).total_seconds() > 60:
|
||||
@@ -54,11 +97,8 @@ class TokenAuthentication(authentication.TokenAuthentication):
|
||||
else:
|
||||
Token.objects.filter(pk=token.pk).update(last_used=timezone.now())
|
||||
|
||||
# Enforce the Token's expiration time, if one has been set.
|
||||
if token.is_expired:
|
||||
raise exceptions.AuthenticationFailed("Token expired")
|
||||
|
||||
user = token.user
|
||||
|
||||
# When LDAP authentication is active try to load user data from LDAP directory
|
||||
if 'netbox.authentication.LDAPBackend' in settings.REMOTE_AUTH_BACKEND:
|
||||
from netbox.authentication import LDAPBackend
|
||||
@@ -124,6 +164,20 @@ class TokenPermissions(DjangoObjectPermissions):
|
||||
return super().has_object_permission(request, view, obj)
|
||||
|
||||
|
||||
class TokenWritePermission(BasePermission):
|
||||
"""
|
||||
Verify the token has write_enabled for unsafe methods, without requiring specific model permissions.
|
||||
Used for custom actions that accept user data but don't map to standard CRUD operations.
|
||||
"""
|
||||
|
||||
def has_permission(self, request, view):
|
||||
if not isinstance(request.auth, Token):
|
||||
raise exceptions.PermissionDenied(
|
||||
"TokenWritePermission requires token authentication."
|
||||
)
|
||||
return bool(request.method in SAFE_METHODS or request.auth.write_enabled)
|
||||
|
||||
|
||||
class IsAuthenticatedOrLoginNotRequired(BasePermission):
|
||||
"""
|
||||
Returns True if the user is authenticated or LOGIN_REQUIRED is False.
|
||||
@@ -132,3 +186,17 @@ class IsAuthenticatedOrLoginNotRequired(BasePermission):
|
||||
if not settings.LOGIN_REQUIRED:
|
||||
return True
|
||||
return request.user.is_authenticated
|
||||
|
||||
|
||||
class TokenScheme(OpenApiAuthenticationExtension):
|
||||
target_class = 'netbox.api.authentication.TokenAuthentication'
|
||||
name = 'tokenAuth'
|
||||
match_subclasses = True
|
||||
|
||||
def get_security_definition(self, auto_schema):
|
||||
return {
|
||||
'type': 'apiKey',
|
||||
'in': 'header',
|
||||
'name': 'Authorization',
|
||||
'description': '`Token <token>` (v1) or `Bearer <key>.<token>` (v2)',
|
||||
}
|
||||
|
||||
@@ -169,7 +169,7 @@ class IntegerRangeSerializer(serializers.Serializer):
|
||||
if type(data[0]) is not int or type(data[1]) is not int:
|
||||
raise ValidationError(_("Range boundaries must be defined as integers."))
|
||||
|
||||
return NumericRange(data[0], data[1], bounds='[]')
|
||||
return NumericRange(data[0], data[1] + 1, bounds='[)')
|
||||
|
||||
def to_representation(self, instance):
|
||||
return instance.lower, instance.upper - 1
|
||||
|
||||
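The serializer change above keeps the API contract inclusive on both ends while storing ranges in PostgreSQL's canonical half-open form: an input pair [100, 199] is persisted as NumericRange(100, 200, bounds='[)'), and to_representation() converts back by subtracting one from the upper bound. This aligns stored ranges with the `range_contains` lookup and the `@>` operator, both of which treat the upper bound as exclusive (see the "# 11 is NOT in [1,11)" test comment above). A small round-trip illustration:

from django.db.backends.postgresql.psycopg_any import NumericRange

# API input [100, 199] (inclusive) -> stored half-open range [100, 200)
stored = NumericRange(100, 199 + 1, bounds='[)')

# Representation back to the API: (lower, upper - 1) == (100, 199)
assert (stored.lower, stored.upper - 1) == (100, 199)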
Some files were not shown because too many files have changed in this diff.