mirror of https://github.com/paperless-ngx/paperless-ngx.git, synced 2026-01-07 10:51:19 +00:00

Compare commits: feature-un...11691-docu (4 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | 8912d749f7 |  |
|  | cd760906fa |  |
|  | 9e744a0665 |  |
|  | d49d9f3b16 |  |
.github/workflows/ci.yml (vendored): 20 changed lines
@@ -115,7 +115,7 @@ jobs:
           --frozen \
           mkdocs gh-deploy --force --no-history
       - name: Upload artifact
-        uses: actions/upload-artifact@v6
+        uses: actions/upload-artifact@v5
         with:
           name: documentation
           path: site/
@@ -215,7 +215,7 @@ jobs:
           cache-dependency-path: 'src-ui/pnpm-lock.yaml'
       - name: Cache frontend dependencies
         id: cache-frontend-deps
-        uses: actions/cache@v5
+        uses: actions/cache@v4
         with:
           path: |
             ~/.pnpm-store
@@ -248,7 +248,7 @@ jobs:
           cache-dependency-path: 'src-ui/pnpm-lock.yaml'
       - name: Cache frontend dependencies
         id: cache-frontend-deps
-        uses: actions/cache@v5
+        uses: actions/cache@v4
         with:
           path: |
             ~/.pnpm-store
@@ -301,7 +301,7 @@ jobs:
           cache-dependency-path: 'src-ui/pnpm-lock.yaml'
       - name: Cache frontend dependencies
         id: cache-frontend-deps
-        uses: actions/cache@v5
+        uses: actions/cache@v4
         with:
           path: |
             ~/.pnpm-store
@@ -333,7 +333,7 @@ jobs:
           cache-dependency-path: 'src-ui/pnpm-lock.yaml'
       - name: Cache frontend dependencies
         id: cache-frontend-deps
-        uses: actions/cache@v5
+        uses: actions/cache@v4
         with:
           path: |
             ~/.pnpm-store
@@ -476,7 +476,7 @@ jobs:
           docker cp frontend-extract:/usr/src/paperless/src/documents/static/frontend src/documents/static/frontend/
       - name: Upload frontend artifact
         if: steps.build-vars.outputs.can-push == 'true'
-        uses: actions/upload-artifact@v6
+        uses: actions/upload-artifact@v5
         with:
           name: frontend-compiled
           path: src/documents/static/frontend/
@@ -510,12 +510,12 @@ jobs:
           sudo apt-get update -qq
           sudo apt-get install -qq --no-install-recommends gettext liblept5
       - name: Download frontend artifact
-        uses: actions/download-artifact@v7
+        uses: actions/download-artifact@v6
        with:
          name: frontend-compiled
          path: src/documents/static/frontend/
       - name: Download documentation artifact
-        uses: actions/download-artifact@v7
+        uses: actions/download-artifact@v6
         with:
           name: documentation
           path: docs/_build/html/
@@ -578,7 +578,7 @@ jobs:
           sudo chown -R 1000:1000 paperless-ngx/
           tar -cJf paperless-ngx.tar.xz paperless-ngx/
       - name: Upload release artifact
-        uses: actions/upload-artifact@v6
+        uses: actions/upload-artifact@v5
         with:
           name: release
           path: dist/paperless-ngx.tar.xz
@@ -595,7 +595,7 @@ jobs:
     if: github.ref_type == 'tag' && (startsWith(github.ref_name, 'v') || contains(github.ref_name, '-beta.rc'))
     steps:
       - name: Download release artifact
-        uses: actions/download-artifact@v7
+        uses: actions/download-artifact@v6
         with:
           name: release
           path: ./

.github/workflows/repo-maintenance.yml (vendored): 2 changed lines
@@ -37,7 +37,7 @@ jobs:
     if: github.repository_owner == 'paperless-ngx'
     runs-on: ubuntu-24.04
     steps:
-      - uses: dessant/lock-threads@v6
+      - uses: dessant/lock-threads@v5
        with:
          issue-inactive-days: '30'
          pr-inactive-days: '30'

.github/workflows/translate-strings.yml (vendored): 2 changed lines
@@ -47,7 +47,7 @@ jobs:
           cache-dependency-path: 'src-ui/pnpm-lock.yaml'
       - name: Cache frontend dependencies
         id: cache-frontend-deps
-        uses: actions/cache@v5
+        uses: actions/cache@v4
         with:
           path: |
             ~/.pnpm-store

@@ -1,6 +1,7 @@
 import { NgClass, NgTemplateOutlet, TitleCasePipe } from '@angular/common'
 import { Component, inject } from '@angular/core'
 import { FormsModule, ReactiveFormsModule } from '@angular/forms'
+import { RouterModule } from '@angular/router'
 import {
   NgbDropdownModule,
   NgbPaginationModule,
@@ -29,6 +30,7 @@ import { ManagementListComponent } from '../management-list/management-list.comp
     TitleCasePipe,
     FormsModule,
     ReactiveFormsModule,
+    RouterModule,
     NgClass,
     NgTemplateOutlet,
     NgbDropdownModule,

@@ -42,7 +42,13 @@
         <button (click)="editField(field)" *pngxIfPermissions="{ action: PermissionAction.Change, type: PermissionType.CustomField }" ngbDropdownItem i18n>Edit</button>
         <button class="text-danger" (click)="deleteField(field)" *pngxIfPermissions="{ action: PermissionAction.Delete, type: PermissionType.CustomField }" ngbDropdownItem i18n>Delete</button>
         @if (field.document_count > 0) {
-          <button (click)="filterDocuments(field)" *pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.Document }" ngbDropdownItem i18n>Filter Documents ({{ field.document_count }})</button>
+          <a
+            *pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.Document }"
+            ngbDropdownItem
+            [routerLink]="getDocumentFilterUrl(field)"
+            i18n
+            >Filter Documents ({{ field.document_count }})</a
+          >
         }
       </div>
     </div>
@@ -57,9 +63,13 @@
     </div>
     @if (field.document_count > 0) {
       <div class="btn-group d-none d-sm-inline-block ms-2">
-        <button class="btn btn-sm btn-outline-secondary" type="button" (click)="filterDocuments(field)">
-          <i-bs width="1em" height="1em" name="filter"></i-bs> <ng-container i18n>Documents</ng-container><span class="badge bg-light text-secondary ms-2">{{ field.document_count }}</span>
-        </button>
+        <a
+          class="btn btn-sm btn-outline-secondary"
+          [routerLink]="getDocumentFilterUrl(field)"
+        >
+          <i-bs width="1em" height="1em" name="filter"></i-bs> <ng-container i18n>Documents</ng-container
+          ><span class="badge bg-light text-secondary ms-2">{{ field.document_count }}</span>
+        </a>
       </div>
     }
   </div>

@@ -4,6 +4,7 @@ import { provideHttpClient, withInterceptorsFromDi } from '@angular/common/http'
 import { provideHttpClientTesting } from '@angular/common/http/testing'
 import { FormsModule, ReactiveFormsModule } from '@angular/forms'
 import { By } from '@angular/platform-browser'
+import { RouterTestingModule } from '@angular/router/testing'
 import {
   NgbModal,
   NgbModalModule,
@@ -61,6 +62,7 @@ describe('CustomFieldsComponent', () => {
         NgbModalModule,
         NgbPopoverModule,
         NgxBootstrapIconsModule.pick(allIcons),
+        RouterTestingModule,
         CustomFieldsComponent,
         IfPermissionsDirective,
         PageHeaderComponent,
@@ -108,7 +110,9 @@ describe('CustomFieldsComponent', () => {
     const toastInfoSpy = jest.spyOn(toastService, 'showInfo')
     const reloadSpy = jest.spyOn(component, 'reload')

-    const createButton = fixture.debugElement.queryAll(By.css('button'))[1]
+    const createButton = fixture.debugElement
+      .queryAll(By.css('button'))
+      .find((btn) => btn.nativeElement.textContent.trim().includes('Add Field'))
     createButton.triggerEventHandler('click')

     expect(modal).not.toBeUndefined()
@@ -133,7 +137,11 @@ describe('CustomFieldsComponent', () => {
     const toastInfoSpy = jest.spyOn(toastService, 'showInfo')
     const reloadSpy = jest.spyOn(component, 'reload')

-    const editButton = fixture.debugElement.queryAll(By.css('button'))[2]
+    const editButton = fixture.debugElement
+      .queryAll(By.css('button'))
+      .find((btn) =>
+        btn.nativeElement.textContent.trim().includes(fields[0].name)
+      )
     editButton.triggerEventHandler('click')

     expect(modal).not.toBeUndefined()
@@ -158,7 +166,9 @@ describe('CustomFieldsComponent', () => {
     const deleteSpy = jest.spyOn(customFieldsService, 'delete')
     const reloadSpy = jest.spyOn(component, 'reload')

-    const deleteButton = fixture.debugElement.queryAll(By.css('button'))[5]
+    const deleteButton = fixture.debugElement
+      .queryAll(By.css('button'))
+      .find((btn) => btn.nativeElement.textContent.trim().includes('Delete'))
     deleteButton.triggerEventHandler('click')

     expect(modal).not.toBeUndefined()
@@ -176,10 +186,10 @@ describe('CustomFieldsComponent', () => {
     expect(reloadSpy).toHaveBeenCalled()
   })

-  it('should support filter documents', () => {
-    const filterSpy = jest.spyOn(listViewService, 'quickFilter')
-    component.filterDocuments(fields[0])
-    expect(filterSpy).toHaveBeenCalledWith([
+  it('should provide document filter url', () => {
+    const urlSpy = jest.spyOn(listViewService, 'getQuickFilterUrl')
+    component.getDocumentFilterUrl(fields[0])
+    expect(urlSpy).toHaveBeenCalledWith([
       {
         rule_type: FILTER_CUSTOM_FIELDS_QUERY,
         value: JSON.stringify([

@@ -1,4 +1,5 @@
 import { Component, OnInit, inject } from '@angular/core'
+import { RouterModule } from '@angular/router'
 import {
   NgbDropdownModule,
   NgbModal,
@@ -36,6 +37,7 @@ import { LoadingComponentWithPermissions } from '../../loading-component/loading
     NgbDropdownModule,
     NgbPaginationModule,
     NgxBootstrapIconsModule,
+    RouterModule,
   ],
 })
 export class CustomFieldsComponent
@@ -130,8 +132,8 @@ export class CustomFieldsComponent
     return DATA_TYPE_LABELS.find((l) => l.id === field.data_type).name
   }

-  filterDocuments(field: CustomField) {
-    this.documentListViewService.quickFilter([
+  getDocumentFilterUrl(field: CustomField) {
+    return this.documentListViewService.getQuickFilterUrl([
       {
         rule_type: FILTER_CUSTOM_FIELDS_QUERY,
         value: JSON.stringify([

@@ -1,6 +1,7 @@
 import { NgClass, NgTemplateOutlet, TitleCasePipe } from '@angular/common'
 import { Component, inject } from '@angular/core'
 import { FormsModule, ReactiveFormsModule } from '@angular/forms'
+import { RouterModule } from '@angular/router'
 import {
   NgbDropdownModule,
   NgbPaginationModule,
@@ -27,6 +28,7 @@ import { ManagementListComponent } from '../management-list/management-list.comp
     IfPermissionsDirective,
     FormsModule,
     ReactiveFormsModule,
+    RouterModule,
     NgClass,
     NgTemplateOutlet,
     NgbDropdownModule,

@@ -120,7 +120,14 @@
         <button (click)="openEditDialog(object)" *pngxIfPermissions="{ action: PermissionAction.Change, type: permissionType }" ngbDropdownItem i18n>Edit</button>
         <button class="text-danger" (click)="openDeleteDialog(object)" *pngxIfPermissions="{ action: PermissionAction.Delete, type: permissionType }" ngbDropdownItem i18n>Delete</button>
         @if (getDocumentCount(object) > 0) {
-          <button (click)="filterDocuments(object)" *pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.Document }" ngbDropdownItem i18n>Filter Documents ({{ getDocumentCount(object) }})</button>
+          <a
+            *pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.Document }"
+            ngbDropdownItem
+            [routerLink]="getDocumentFilterUrl(object)"
+            (click)="$event?.stopPropagation()"
+            i18n
+            >Filter Documents ({{ getDocumentCount(object) }})</a
+          >
         }
       </div>
     </div>
@@ -135,9 +142,15 @@
     </div>
     @if (getDocumentCount(object) > 0) {
       <div class="btn-group d-none d-sm-inline-block">
-        <button class="btn btn-sm btn-outline-secondary" (click)="filterDocuments(object); $event.stopPropagation();" *pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.Document }">
-          <i-bs width="1em" height="1em" name="filter"></i-bs> <ng-container i18n>Documents</ng-container><span class="badge bg-light text-secondary ms-2">{{ getDocumentCount(object) }}</span>
-        </button>
+        <a
+          class="btn btn-sm btn-outline-secondary"
+          *pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.Document }"
+          [routerLink]="getDocumentFilterUrl(object)"
+          (click)="$event?.stopPropagation()"
+        >
+          <i-bs width="1em" height="1em" name="filter"></i-bs> <ng-container i18n>Documents</ng-container
+          ><span class="badge bg-light text-secondary ms-2">{{ getDocumentCount(object) }}</span>
+        </a>
       </div>
     }
   </div>

@@ -13,6 +13,7 @@ import {
 } from '@angular/core/testing'
 import { FormsModule, ReactiveFormsModule } from '@angular/forms'
 import { By } from '@angular/platform-browser'
+import { RouterLinkWithHref } from '@angular/router'
 import { RouterTestingModule } from '@angular/router/testing'
 import {
   NgbModal,
@@ -230,12 +231,15 @@ describe('ManagementListComponent', () => {
   })

   it('should support quick filter for objects', () => {
-    const qfSpy = jest.spyOn(documentListViewService, 'quickFilter')
-    const filterButton = fixture.debugElement.queryAll(By.css('button'))[9]
-    filterButton.triggerEventHandler('click')
-    expect(qfSpy).toHaveBeenCalledWith([
+    const expectedUrl = documentListViewService.getQuickFilterUrl([
       { rule_type: FILTER_HAS_TAGS_ALL, value: tags[0].id.toString() },
-    ]) // subclasses set the filter rule type
+    ])
+    const filterLink = fixture.debugElement.query(
+      By.css('a.btn-outline-secondary')
+    )
+    expect(filterLink).toBeTruthy()
+    const routerLink = filterLink.injector.get(RouterLinkWithHref)
+    expect(routerLink.urlTree).toEqual(expectedUrl)
   })

   it('should reload on sort', () => {

@@ -230,8 +230,8 @@ export abstract class ManagementListComponent<T extends MatchingModel>

   abstract getDeleteMessage(object: T)

-  filterDocuments(object: MatchingModel) {
-    this.documentListViewService.quickFilter([
+  getDocumentFilterUrl(object: MatchingModel) {
+    return this.documentListViewService.getQuickFilterUrl([
       { rule_type: this.filterRuleType, value: object.id.toString() },
     ])
   }

@@ -1,6 +1,7 @@
 import { NgClass, NgTemplateOutlet, TitleCasePipe } from '@angular/common'
 import { Component, inject } from '@angular/core'
 import { FormsModule, ReactiveFormsModule } from '@angular/forms'
+import { RouterModule } from '@angular/router'
 import {
   NgbDropdownModule,
   NgbPaginationModule,
@@ -27,6 +28,7 @@ import { ManagementListComponent } from '../management-list/management-list.comp
     IfPermissionsDirective,
     FormsModule,
     ReactiveFormsModule,
+    RouterModule,
     NgClass,
     NgTemplateOutlet,
     NgbDropdownModule,

@@ -1,6 +1,7 @@
 import { NgClass, NgTemplateOutlet, TitleCasePipe } from '@angular/common'
 import { Component, inject } from '@angular/core'
 import { FormsModule, ReactiveFormsModule } from '@angular/forms'
+import { RouterModule } from '@angular/router'
 import {
   NgbDropdownModule,
   NgbPaginationModule,
@@ -27,6 +28,7 @@ import { ManagementListComponent } from '../management-list/management-list.comp
     IfPermissionsDirective,
     FormsModule,
     ReactiveFormsModule,
+    RouterModule,
     NgClass,
     NgTemplateOutlet,
     NgbDropdownModule,

@@ -651,4 +651,25 @@ describe('DocumentListViewService', () => {
     documentListViewService.displayFields = customFields as any
     expect(documentListViewService.displayFields).toEqual(['custom_field_1'])
   })
+
+  it('should generate quick filter URL with filter rules', () => {
+    const routerSpy = jest.spyOn(router, 'createUrlTree')
+    const urlTree = documentListViewService.getQuickFilterUrl(filterRules)
+    expect(routerSpy).toHaveBeenCalledWith(['/documents'], {
+      queryParams: expect.objectContaining({
+        tags__id__all: tags__id__all,
+      }),
+    })
+    expect(urlTree).toBeDefined()
+  })
+
+  it('should generate quick filter URL preserving default state', () => {
+    documentListViewService.reload()
+    httpTestingController.expectOne(
+      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true`
+    )
+    const urlTree = documentListViewService.getQuickFilterUrl(filterRules)
+    expect(urlTree).toBeDefined()
+    expect(router.createUrlTree).toBeDefined()
+  })
 })

@@ -1,5 +1,5 @@
 import { Injectable, inject } from '@angular/core'
-import { ParamMap, Router } from '@angular/router'
+import { ParamMap, Router, UrlTree } from '@angular/router'
 import { Observable, Subject, first, takeUntil } from 'rxjs'
 import {
   DEFAULT_DISPLAY_FIELDS,
@@ -483,6 +483,18 @@ export class DocumentListViewService {
     this.router.navigate(['documents'])
   }

+  getQuickFilterUrl(filterRules: FilterRule[]): UrlTree {
+    const defaultState = {
+      ...this.defaultListViewState(),
+      ...this.listViewStates.get(null),
+      filterRules,
+    }
+    const params = paramsFromViewState(defaultState)
+    return this.router.createUrlTree(['/documents'], {
+      queryParams: params,
+    })
+  }
+
   getLastPage(): number {
     return Math.ceil(this.collectionSize / this.pageSize)
   }

@@ -46,7 +46,6 @@ from documents.signals.handlers import run_workflows
 from documents.templating.workflows import parse_w_workflow_placeholders
 from documents.utils import copy_basic_file_stats
 from documents.utils import copy_file_with_basic_stats
-from documents.utils import normalize_nfc
 from documents.utils import run_subprocess
 from paperless_mail.parsers import MailDocumentParser
@@ -112,12 +111,7 @@ class ConsumerPluginMixin:

         self.renew_logging_group()

-        self.metadata.filename = normalize_nfc(self.metadata.filename)
-        self.metadata.title = normalize_nfc(self.metadata.title)
-
-        self.filename = normalize_nfc(
-            self.metadata.filename or self.input_doc.original_file.name,
-        )
+        self.filename = self.metadata.filename or self.input_doc.original_file.name

     def _send_progress(
         self,
@@ -658,8 +652,6 @@ class ConsumerPlugin(
                 f"Error occurred parsing title override '{self.metadata.title}', falling back to original. Exception: {e}",
             )

-        title = normalize_nfc(title)
-
         file_for_checksum = (
             self.unmodified_original
             if self.unmodified_original is not None

@@ -6,7 +6,6 @@ from django.conf import settings
 from documents.models import Document
 from documents.templating.filepath import validate_filepath_template_and_render
 from documents.templating.utils import convert_format_str_to_template_format
-from documents.utils import normalize_nfc


 def create_source_path_directory(source_path: Path) -> None:
@@ -56,11 +55,11 @@ def generate_unique_filename(doc, *, archive_filename=False) -> Path:
     """
     if archive_filename:
         old_filename: Path | None = (
-            Path(normalize_nfc(doc.archive_filename)) if doc.archive_filename else None
+            Path(doc.archive_filename) if doc.archive_filename else None
         )
         root = settings.ARCHIVE_DIR
     else:
-        old_filename = Path(normalize_nfc(doc.filename)) if doc.filename else None
+        old_filename = Path(doc.filename) if doc.filename else None
         root = settings.ORIGINALS_DIR

     # If generating archive filenames, try to make a name that is similar to
@@ -92,7 +91,7 @@ def generate_unique_filename(doc, *, archive_filename=False) -> Path:
         )
         if new_filename == old_filename:
             # still the same as before.
-            return Path(normalize_nfc(str(new_filename)))
+            return new_filename

         if (root / new_filename).exists():
             counter += 1
@@ -120,7 +119,7 @@ def format_filename(document: Document, template_str: str) -> str | None:
             "none",
         )  # backward compatibility

-        return normalize_nfc(rendered_filename)
+        return rendered_filename


 def generate_filename(
@@ -175,4 +174,4 @@ def generate_filename(
     if append_gpg and doc.storage_type == doc.STORAGE_TYPE_GPG:
         full_path = full_path.with_suffix(full_path.suffix + ".gpg")

-    return Path(normalize_nfc(str(full_path)))
+    return full_path

@@ -3,7 +3,6 @@ from __future__ import annotations
 import logging
 import math
 import re
-import unicodedata
 from collections import Counter
 from contextlib import contextmanager
 from datetime import datetime
@@ -59,14 +58,6 @@ if TYPE_CHECKING:
 logger = logging.getLogger("paperless.index")


-def _normalize_for_index(value: str | None) -> str | None:
-    """Normalize text to NFC for consistent search/index matching."""
-
-    if value is None:
-        return None
-    return unicodedata.normalize("NFC", value)
-
-
 def get_schema() -> Schema:
     return Schema(
         id=NUMERIC(stored=True, unique=True),
@@ -172,41 +163,37 @@ def update_document(writer: AsyncWriter, doc: Document) -> None:
     viewer_ids: str = ",".join([str(u.id) for u in users_with_perms])
     writer.update_document(
         id=doc.pk,
-        title=_normalize_for_index(doc.title),
-        content=_normalize_for_index(doc.content),
-        correspondent=_normalize_for_index(
-            doc.correspondent.name if doc.correspondent else None,
-        ),
+        title=doc.title,
+        content=doc.content,
+        correspondent=doc.correspondent.name if doc.correspondent else None,
         correspondent_id=doc.correspondent.id if doc.correspondent else None,
         has_correspondent=doc.correspondent is not None,
-        tag=_normalize_for_index(tags) if tags else None,
+        tag=tags if tags else None,
         tag_id=tags_ids if tags_ids else None,
         has_tag=len(tags) > 0,
-        type=_normalize_for_index(
-            doc.document_type.name if doc.document_type else None,
-        ),
+        type=doc.document_type.name if doc.document_type else None,
         type_id=doc.document_type.id if doc.document_type else None,
         has_type=doc.document_type is not None,
         created=datetime.combine(doc.created, time.min),
         added=doc.added,
         asn=asn,
         modified=doc.modified,
-        path=_normalize_for_index(doc.storage_path.name if doc.storage_path else None),
+        path=doc.storage_path.name if doc.storage_path else None,
         path_id=doc.storage_path.id if doc.storage_path else None,
         has_path=doc.storage_path is not None,
-        notes=_normalize_for_index(notes),
+        notes=notes,
         num_notes=len(notes),
-        custom_fields=_normalize_for_index(custom_fields),
+        custom_fields=custom_fields,
         custom_field_count=len(doc.custom_fields.all()),
         has_custom_fields=len(custom_fields) > 0,
         custom_fields_id=custom_fields_ids if custom_fields_ids else None,
-        owner=_normalize_for_index(doc.owner.username if doc.owner else None),
+        owner=doc.owner.username if doc.owner else None,
         owner_id=doc.owner.id if doc.owner else None,
         has_owner=doc.owner is not None,
         viewer_id=viewer_ids if viewer_ids else None,
         checksum=doc.checksum,
         page_count=doc.page_count,
-        original_filename=_normalize_for_index(doc.original_filename),
+        original_filename=doc.original_filename,
         is_shared=len(viewer_ids) > 0,
     )
     logger.debug(f"Index updated for document {doc.pk}.")
@@ -434,7 +421,7 @@ class LocalDateParser(English):

 class DelayedFullTextQuery(DelayedQuery):
     def _get_query(self) -> tuple:
-        q_str = _normalize_for_index(self.query_params["query"]) or ""
+        q_str = self.query_params["query"]
         q_str = rewrite_natural_date_keywords(q_str)
         qp = MultifieldParser(
             [
@@ -473,12 +460,7 @@ class DelayedFullTextQuery(DelayedQuery):
 class DelayedMoreLikeThisQuery(DelayedQuery):
     def _get_query(self) -> tuple:
         more_like_doc_id = int(self.query_params["more_like_id"])
-        content = (
-            _normalize_for_index(
-                Document.objects.get(id=more_like_doc_id).content,
-            )
-            or ""
-        )
+        content = Document.objects.get(id=more_like_doc_id).content

         docnum = self.searcher.document_number(id=more_like_doc_id)
         kts = self.searcher.key_terms_from_text(
@@ -506,7 +488,6 @@ def autocomplete(
     Mimics whoosh.reading.IndexReader.most_distinctive_terms with permissions
     and without scoring
     """
-    term = _normalize_for_index(term) or ""
     terms = []

     with ix.searcher(weighting=TF_IDF()) as s:

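The removed `_normalize_for_index` helper above exists because the same visible text can be stored in different Unicode forms. A minimal standalone sketch (standard library only, not part of the diff) of the behaviour it relied on, using the composed and decomposed spellings that also appear in the removed search test:

```python
import unicodedata

# "certidão" typed with a precomposed "ã" vs. "a" followed by a combining tilde (U+0303)
composed = "certidão"
decomposed = "certida\u0303o"

# The raw strings differ code point by code point, so an index built on one
# form does not match a query supplied in the other form.
assert composed != decomposed

# NFC normalization folds both spellings onto the same code points, which is
# what the removed helper applied to fields before indexing and to queries.
assert unicodedata.normalize("NFC", decomposed) == composed
```
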
@@ -2,12 +2,10 @@ from __future__ import annotations

 import logging
 import re
-import unicodedata
 from fnmatch import fnmatch
 from fnmatch import translate as fnmatch_translate
 from typing import TYPE_CHECKING

 from django.db.models import Q
 from rest_framework import serializers

 from documents.data_models import ConsumableDocument
@@ -23,7 +21,6 @@ from documents.models import Workflow
 from documents.models import WorkflowTrigger
 from documents.permissions import get_objects_for_user_owner_aware
 from documents.regex import safe_regex_search
-from documents.utils import normalize_nfc

 if TYPE_CHECKING:
     from django.db.models import QuerySet
@@ -33,34 +30,6 @@ if TYPE_CHECKING:
 logger = logging.getLogger("paperless.matching")


-def _normalize_glob_value(value: str) -> str:
-    """Normalize strings for glob-style matching (case-insensitive)."""
-
-    return (normalize_nfc(value) or "").casefold()
-
-
-def _normalized_fnmatch(name: str, pattern: str) -> bool:
-    """Canonicalize Unicode and compare using fnmatch semantics."""
-
-    return fnmatch(_normalize_glob_value(name), _normalize_glob_value(pattern))
-
-
-def _glob_regex_variants(pattern: str) -> list[str]:
-    """
-    Build regex patterns that match both NFC and NFD forms of a glob pattern.
-    Using both forms lets DB prefilters remain Unicode-normalization agnostic.
-    """
-
-    regexes = set()
-    for normalized in {
-        normalize_nfc(pattern) or "",
-        unicodedata.normalize("NFD", pattern),
-    }:
-        regex = fnmatch_translate(normalized).lstrip("^").rstrip("$")
-        regexes.add(regex)
-    return list(regexes)
-
-
 def log_reason(
     matching_model: MatchingModel | WorkflowTrigger,
     document: Document,
@@ -336,9 +305,9 @@ def consumable_document_matches_workflow(
     if (
         trigger.filter_filename is not None
         and len(trigger.filter_filename) > 0
-        and not _normalized_fnmatch(
-            document.original_file.name,
-            trigger.filter_filename,
+        and not fnmatch(
+            document.original_file.name.lower(),
+            trigger.filter_filename.lower(),
         )
     ):
         reason = (
@@ -359,7 +328,7 @@ def consumable_document_matches_workflow(
     if (
         trigger.filter_path is not None
         and len(trigger.filter_path) > 0
-        and not _normalized_fnmatch(
+        and not fnmatch(
             match_against,
             trigger.filter_path,
         )
@@ -523,9 +492,9 @@ def existing_document_matches_workflow(
         trigger.filter_filename is not None
         and len(trigger.filter_filename) > 0
         and document.original_filename is not None
-        and not _normalized_fnmatch(
-            document.original_filename,
-            trigger.filter_filename,
+        and not fnmatch(
+            document.original_filename.lower(),
+            trigger.filter_filename.lower(),
         )
     ):
         return (
@@ -604,11 +573,8 @@ def prefilter_documents_by_workflowtrigger(
         documents = documents.annotate(**annotations).filter(custom_field_q)

     if trigger.filter_filename:
-        regexes = _glob_regex_variants(trigger.filter_filename)
-        filename_q = Q()
-        for regex in regexes:
-            filename_q |= Q(original_filename__iregex=regex)
-        documents = documents.filter(filename_q)
+        regex = fnmatch_translate(trigger.filter_filename).lstrip("^").rstrip("$")
+        documents = documents.filter(original_filename__iregex=regex)

     return documents

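The matching changes above replace the Unicode-aware glob helpers with plain lower-cased `fnmatch` calls. A small standalone sketch of the behavioural difference, using the filename and pattern from the removed workflow tests; `normalized_fnmatch` below is an illustrative stand-in that mirrors what the removed `_normalized_fnmatch` helper did:

```python
import unicodedata
from fnmatch import fnmatch

# Pattern with a precomposed "č"; incoming filename spelled as "c" + combining caron.
pattern = "*račun*"
filename = "rac\u030cun.pdf"

# Comparing lower-cased strings directly, as the retained code path does,
# treats the two spellings of "č" as different characters.
print(fnmatch(filename.lower(), pattern.lower()))  # False


def normalized_fnmatch(name: str, pat: str) -> bool:
    # NFC-normalize and casefold both sides before the glob comparison,
    # so composed and decomposed spellings match each other.
    nfc = lambda s: unicodedata.normalize("NFC", s).casefold()
    return fnmatch(nfc(name), nfc(pat))


print(normalized_fnmatch(filename, pattern))  # True
```
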
@@ -89,23 +89,6 @@ class TestDocumentSearchApi(DirectoriesMixin, APITestCase):
         self.assertEqual(len(results), 0)
         self.assertCountEqual(response.data["all"], [])

-    def test_search_handles_diacritics_normalization(self):
-        doc = Document.objects.create(
-            title="certida\u0303o de nascimento",
-            content="birth record without keyword",
-            checksum="D",
-            pk=10,
-        )
-        with AsyncWriter(index.open_index()) as writer:
-            index.update_document(writer, doc)
-
-        response = self.client.get("/api/documents/?query=certidão")
-        self.assertEqual(response.status_code, status.HTTP_200_OK)
-        results = response.data["results"]
-        self.assertEqual(response.data["count"], 1)
-        self.assertEqual(len(results), 1)
-        self.assertEqual(results[0]["id"], doc.id)
-
     def test_search_custom_field_ordering(self):
         custom_field = CustomField.objects.create(
             name="Sortable field",

@@ -290,23 +290,6 @@ class TestConsumer(

         self._assert_first_last_send_progress()

-    def test_override_filename_normalized(self):
-        filename = self.get_test_file()
-        override_filename = "Inhaltsu\u0308bersicht.pdf"
-
-        with self.get_consumer(
-            filename,
-            DocumentMetadataOverrides(filename=override_filename),
-        ) as consumer:
-            consumer.run()
-
-        document = Document.objects.first()
-
-        self.assertIsNotNone(document)
-        self.assertEqual(document.original_filename, "Inhaltsübersicht.pdf")
-        self.assertEqual(document.title, "Inhaltsübersicht")
-        self._assert_first_last_send_progress()
-
     def testOverrideTitle(self):
         with self.get_consumer(
             self.get_test_file(),
@@ -321,25 +304,6 @@ class TestConsumer(
         self.assertEqual(document.title, "Override Title")
         self._assert_first_last_send_progress()

-    @override_settings(FILENAME_FORMAT="{{ title }}")
-    def test_filename_format_normalized(self):
-        filename = self.get_test_file()
-        title = "Inhaltsu\u0308bersicht Faszination"
-
-        with self.get_consumer(
-            filename,
-            DocumentMetadataOverrides(title=title),
-        ) as consumer:
-            consumer.run()
-
-        document = Document.objects.first()
-
-        self.assertIsNotNone(document)
-        self.assertEqual(document.title, "Inhaltsübersicht Faszination")
-        self.assertEqual(document.filename, "Inhaltsübersicht Faszination.pdf")
-        self.assertIsFile(document.source_path)
-        self._assert_first_last_send_progress()
-
     def testOverrideCorrespondent(self):
         c = Correspondent.objects.create(name="test")

@@ -557,50 +557,6 @@ class TestWorkflows(
         expected_str = f"Document filename {test_file.name} does not match"
         self.assertIn(expected_str, cm.output[1])

-    def test_workflow_match_filename_diacritics_normalized(self):
-        """
-        GIVEN:
-            - Consumption workflow filtering on filename with diacritics
-        WHEN:
-            - File with decomposed Unicode filename is consumed
-        THEN:
-            - Workflow still matches and applies overrides
-        """
-        trigger = WorkflowTrigger.objects.create(
-            type=WorkflowTrigger.WorkflowTriggerType.CONSUMPTION,
-            sources=f"{DocumentSource.ApiUpload},{DocumentSource.ConsumeFolder},{DocumentSource.MailFetch}",
-            filter_filename="*račun*",
-        )
-        action = WorkflowAction.objects.create(
-            assign_title="Diacritics matched",
-        )
-        action.save()
-
-        w = Workflow.objects.create(
-            name="Workflow 1",
-            order=0,
-        )
-        w.triggers.add(trigger)
-        w.actions.add(action)
-        w.save()
-
-        decomposed_name = "rac\u030cun.pdf"
-        test_file = shutil.copy(
-            self.SAMPLE_DIR / "simple.pdf",
-            self.dirs.scratch_dir / decomposed_name,
-        )
-
-        with mock.patch("documents.tasks.ProgressManager", DummyProgressManager):
-            tasks.consume_file(
-                ConsumableDocument(
-                    source=DocumentSource.ConsumeFolder,
-                    original_file=test_file,
-                ),
-                None,
-            )
-        document = Document.objects.first()
-        self.assertEqual(document.title, "Diacritics matched")
-
     def test_workflow_no_match_path(self):
         """
         GIVEN:
@@ -990,35 +946,6 @@ class TestWorkflows(
         self.assertEqual(doc.correspondent, self.c2)
         self.assertEqual(doc.title, f"Doc created in {created.year}")

-    def test_document_added_filename_diacritics_normalized(self):
-        trigger = WorkflowTrigger.objects.create(
-            type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
-            filter_filename="*račun*",
-        )
-        action = WorkflowAction.objects.create(
-            assign_title="Matched diacritics",
-        )
-        w = Workflow.objects.create(
-            name="Workflow 1",
-            order=0,
-        )
-        w.triggers.add(trigger)
-        w.actions.add(action)
-        w.save()
-
-        doc = Document.objects.create(
-            title="sample test",
-            correspondent=self.c,
-            original_filename="rac\u030cun.pdf",
-        )
-
-        document_consumption_finished.send(
-            sender=self.__class__,
-            document=doc,
-        )
-
-        self.assertEqual(doc.title, "Matched diacritics")
-
     def test_document_added_no_match_filename(self):
         trigger = WorkflowTrigger.objects.create(
             type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,

@@ -1,7 +1,5 @@
 import logging
 import shutil
-import unicodedata
-from os import PathLike
 from os import utime
 from pathlib import Path
 from subprocess import CompletedProcess
@@ -18,14 +16,6 @@ def _coerce_to_path(
     return Path(source).resolve(), Path(dest).resolve()


-def normalize_nfc(value: str | PathLike[str] | None) -> str | None:
-    """Return NFC-normalized string for filesystem-safe comparisons."""
-
-    if value is None:
-        return None
-    return unicodedata.normalize("NFC", str(value))
-
-
 def copy_basic_file_stats(source: Path | str, dest: Path | str) -> None:
     """
     Copies only the m_time and a_time attributes from source to destination.

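The removed `normalize_nfc` helper is small enough to restate with example inputs. A standalone sketch (reproducing the removed function verbatim for illustration, with the decomposed filename used in the removed consumer test) of what its callers relied on:

```python
import unicodedata
from os import PathLike
from pathlib import Path


# Behaviour of the removed helper: accept str, PathLike, or None and
# always return an NFC-normalized string (or None).
def normalize_nfc(value: str | PathLike[str] | None) -> str | None:
    if value is None:
        return None
    return unicodedata.normalize("NFC", str(value))


# "u" + combining diaeresis (U+0308) composes to the single "ü" code point,
# and None passes through untouched.
assert normalize_nfc(Path("Inhaltsu\u0308bersicht.pdf")) == "Inhaltsübersicht.pdf"
assert normalize_nfc(None) is None
```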