IoT allocation is still outstanding
pc-cattle-transportation/AUTO_FILL_IMPLEMENTATION.md (new file, 155 lines)
@@ -0,0 +1,155 @@
# Loading Info Form Auto-Fill Implementation

## Overview

Auto-fill for the loading info form is now implemented: form fields are mapped and populated automatically from the data returned by the API.

## Implemented Features

### 1. Mapped Fields

Based on the provided API response, the following fields are mapped automatically (a compact, loop-based equivalent is sketched after these lists):

#### Basic info
- `deliveryId` ← `id`
- `estimatedDeliveryTime` ← `estimatedDeliveryTime`
- `serverDeviceSn` ← `serverDeviceId`

#### Weight info
- `emptyWeight` ← `emptyWeight`
- `entruckWeight` ← `entruckWeight`
- `landingEntruckWeight` ← `landingEntruckWeight`

#### Photo URLs
- `quarantineTickeyUrl` ← `quarantineTickeyUrl`
- `poundListImg` ← `poundListImg`
- `emptyVehicleFrontPhoto` ← `emptyVehicleFrontPhoto`
- `loadedVehicleFrontPhoto` ← `loadedVehicleFrontPhoto`
- `loadedVehicleWeightPhoto` ← `loadedVehicleWeightPhoto`
- `driverIdCardPhoto` ← `driverIdCardPhoto`

#### Video URLs
- `entruckWeightVideo` ← `entruckWeightVideo`
- `emptyWeightVideo` ← `emptyWeightVideo`
- `entruckVideo` ← `entruckVideo`
- `controlSlotVideo` ← `controlSlotVideo`
- `cattleLoadingCircleVideo` ← `cattleLoadingCircleVideo`
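Most of these fields keep the same name on both sides, so the assignments in `autoFillFormData` (shown in the next section) could also be expressed as a loop. The sketch below is only an illustrative alternative, not the committed implementation; the field list comes straight from the tables above and assumes it runs in the same component scope as `ruleForm`.

```javascript
// Illustrative alternative to the field-by-field assignments in autoFillFormData.
// Only fields whose names match on both sides are handled by the loop;
// the renamed ones (deliveryId ← id, serverDeviceSn ← serverDeviceId) stay explicit.
const SAME_NAME_FIELDS = [
  'estimatedDeliveryTime',
  'emptyWeight', 'entruckWeight', 'landingEntruckWeight',
  'quarantineTickeyUrl', 'poundListImg', 'emptyVehicleFrontPhoto',
  'loadedVehicleFrontPhoto', 'loadedVehicleWeightPhoto', 'driverIdCardPhoto',
  'entruckWeightVideo', 'emptyWeightVideo', 'entruckVideo',
  'controlSlotVideo', 'cattleLoadingCircleVideo',
];

const autoFillFormDataCompact = (apiData) => {
  if (!apiData) return;
  ruleForm.deliveryId = apiData.id || '';
  ruleForm.serverDeviceSn = apiData.serverDeviceId || '';
  SAME_NAME_FIELDS.forEach((key) => {
    ruleForm[key] = apiData[key] || '';
  });
};
```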
### 2. Core Implementation

#### Auto-fill function

```javascript
const autoFillFormData = (apiData) => {
  if (!apiData) return;

  // Basic info
  ruleForm.deliveryId = apiData.id || '';
  ruleForm.estimatedDeliveryTime = apiData.estimatedDeliveryTime || '';
  ruleForm.serverDeviceSn = apiData.serverDeviceId || '';

  // Weights
  ruleForm.emptyWeight = apiData.emptyWeight || '';
  ruleForm.entruckWeight = apiData.entruckWeight || '';
  ruleForm.landingEntruckWeight = apiData.landingEntruckWeight || '';

  // Photo URLs
  ruleForm.quarantineTickeyUrl = apiData.quarantineTickeyUrl || '';
  ruleForm.poundListImg = apiData.poundListImg || '';
  ruleForm.emptyVehicleFrontPhoto = apiData.emptyVehicleFrontPhoto || '';
  ruleForm.loadedVehicleFrontPhoto = apiData.loadedVehicleFrontPhoto || '';
  ruleForm.loadedVehicleWeightPhoto = apiData.loadedVehicleWeightPhoto || '';
  ruleForm.driverIdCardPhoto = apiData.driverIdCardPhoto || '';

  // Video URLs
  ruleForm.entruckWeightVideo = apiData.entruckWeightVideo || '';
  ruleForm.emptyWeightVideo = apiData.emptyWeightVideo || '';
  ruleForm.entruckVideo = apiData.entruckVideo || '';
  ruleForm.controlSlotVideo = apiData.controlSlotVideo || '';
  ruleForm.cattleLoadingCircleVideo = apiData.cattleLoadingCircleVideo || '';

  console.log('表单数据已自动填充:', ruleForm);
};
```

#### Updated dialog entry function

```javascript
const onShowDialog = (row, apiData = null) => {
  data.dialogVisible = true;
  if (formDataRef.value) {
    formDataRef.value.resetFields();
  }
  if (row) {
    nextTick(() => {
      data.deliveryId = row.id;
      ruleForm.deliveryId = row.id;

      // If API data was provided, fill the form directly
      if (apiData) {
        autoFillFormData(apiData);
      } else {
        // Otherwise fetch the details from the server
        getOrderDetail();
      }

      getHostList();
    });
  }
};
```

## Usage

### Option 1: Pass the API data directly

```javascript
// When opening the loading dialog, pass the API response data directly
const loadClick = (row, apiData) => {
  if (LoadDialogRef.value) {
    LoadDialogRef.value.onShowDialog(row, apiData);
  }
};
```

### Option 2: Auto-fill from the API response

When `getOrderDetail()` runs, it calls `autoFillFormData(res.data)` to populate the form (see the sketch below).
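For reference, the integration point in `loadDialog.vue` is roughly the following (condensed from the committed change to `getOrderDetail`; device-list handling and error logging are omitted here):

```javascript
// Condensed sketch of how getOrderDetail hands the response to autoFillFormData.
const getOrderDetail = () => {
  orderLoadDetail({ deliveryId: data.deliveryId }).then((res) => {
    if (res.code === 200) {
      // ...deliveryDevices / xqDevices are normalized here in the real code...
      autoFillFormData(res.data); // populate the form from the API payload
    }
  });
};
```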
## Sample Data Mapping

Based on the provided API response:

```javascript
{
  id: 85,
  deliveryNumber: "ZC20251020105111",
  deliveryTitle: "1",
  estimatedDeliveryTime: "2025-10-31 00:00:00",
  emptyWeight: "1000.00",
  entruckWeight: "2000.00",
  quarantineTickeyUrl: "https://smart-1251449951.cos.ap-guangzhou.myqcloud.com/iotPlateform/2025/10/21/4c4e20251021100838.jpg",
  poundListImg: "https://smart-1251449951.cos.ap-guangzhou.myqcloud.com/iotPlateform/2025/10/21/cows20251021100841.jpg",
  emptyVehicleFrontPhoto: "https://smart-1251449951.cos.ap-guangzhou.myqcloud.com/iotPlateform/2025/10/21/4c4e20251021100847.jpg",
  loadedVehicleFrontPhoto: "https://smart-1251449951.cos.ap-guangzhou.myqcloud.com/iotPlateform/2025/10/21/cows20251021100849.jpg",
  loadedVehicleWeightPhoto: "https://smart-1251449951.cos.ap-guangzhou.myqcloud.com/iotPlateform/2025/10/21/4c4e20251021100854.jpg",
  driverIdCardPhoto: "https://smart-1251449951.cos.ap-guangzhou.myqcloud.com/iotPlateform/2025/10/21/cows20251021100857.jpg",
  entruckWeightVideo: "https://smart-1251449951.cos.ap-guangzhou.myqcloud.com/iotPlateform/2025/10/21/normal_video20251021100901.mp4",
  emptyWeightVideo: "https://smart-1251449951.cos.ap-guangzhou.myqcloud.com/iotPlateform/2025/10/21/normal_video20251021100904.mp4",
  entruckVideo: "https://smart-1251449951.cos.ap-guangzhou.myqcloud.com/iotPlateform/2025/10/21/normal_video20251021101046.mp4",
  controlSlotVideo: "https://smart-1251449951.cos.ap-guangzhou.myqcloud.com/iotPlateform/2025/10/21/normal_video20251021101049.mp4",
  cattleLoadingCircleVideo: "https://smart-1251449951.cos.ap-guangzhou.myqcloud.com/iotPlateform/2025/10/21/normal_video20251021101052.mp4"
}
```

These values are mapped to the corresponding form fields automatically.

## Notes

1. **Null safety**: every field uses `|| ''`, so missing values never break the form.
2. **Backward compatibility**: the original `getOrderDetail()` behavior is preserved.
3. **Debugging**: a `console.log` call was added to trace the fill process.
4. **Reactive updates**: the form state is a Vue 3 `reactive` object, so the UI refreshes automatically when the data changes (see the sketch below).
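The reactivity in point 4 comes from how the form state is declared in `loadDialog.vue`; abridged (the full field list is in the diff further down):

```javascript
import { reactive } from 'vue';

// Abridged: ruleForm is a reactive object, so the plain property assignments
// made in autoFillFormData are enough to update the bound inputs.
const ruleForm = reactive({
  deliveryId: '',
  estimatedDeliveryTime: '',
  serverDeviceSn: '',
  emptyWeight: '',
  entruckWeight: '',
  landingEntruckWeight: '',
  // ...photo and video URL fields omitted in this sketch...
});
```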
## Modified Files

- `pc-cattle-transportation/src/views/shipping/loadDialog.vue`
  - Added the `landingEntruckWeight` field
  - Implemented the `autoFillFormData` function
  - Updated `onShowDialog` to accept an optional API-data parameter
  - Integrated auto-fill into `getOrderDetail`
pc-cattle-transportation/COMPLETE_SOLUTION_SUMMARY.md (new file, 93 lines)
@@ -0,0 +1,93 @@
# Field Mapping Issue: Complete Solution

## 📊 Problem

Per your description, the data relationships are:
- The `supplier_id`, `fund_id`, and `buyer_id` columns in the `delivery` table
- map to the `member_id` column in the `member_user` table
- and the `username` column in `member_user` should be used as the display name

## 🔧 Implemented Solution

### 1. Backend changes
- ✅ Modified `DeliveryServiceImpl.pageQuery`
- ✅ Added `MemberMapper.selectMemberUserById`
- ✅ Implemented the join between the `member` and `member_user` tables
- ✅ Added detailed debug logging
- ✅ Implemented "username first, mobile number as fallback" logic

### 2. Frontend fallback
- ✅ Implemented a data fallback on the frontend (sketched below)
- ✅ Even if the backend lookup fails, the mobile number is still shown
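The fallback itself is a one-liner per field. This is the pattern used when the export data is prepared in `attestation.vue`; the `fundName` line is an assumption, added only to show that the same pattern applies wherever the fund provider is displayed:

```javascript
// Username first, mobile number as fallback.
const toDisplay = (row) => ({
  supplierName: row.supplierName || row.supplierMobile || '',
  buyerName: row.buyerName || row.buyerMobile || '',
  fundName: row.fundName || row.fundMobile || '', // assumption: same fallback pattern
});
```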
## 🧪 Test Steps

### 1. Restart the backend service
```bash
cd tradeCattle/aiotagro-cattle-trade
mvn spring-boot:run
```

### 2. Check the backend logs
The console output should contain log lines similar to:
```
供应商查询结果 - ID: 61, 结果: {id=61, mobile=16666666666, username=测试供应商1}
供应商 - ID: 61, Username: 测试供应商1, Mobile: 16666666666

资金方查询结果 - ID: 63, 结果: {id=63, mobile=17777777771, username=测试资金方1}
资金方 - ID: 63, Username: 测试资金方1, Mobile: 17777777771

采购商查询结果 - ID: 62, 结果: {id=62, mobile=17777777777, username=测试采购方1}
采购商 - ID: 62, Username: 测试采购方1, Mobile: 17777777777
```

### 3. Test the frontend
1. Refresh the entry-quarantine page
2. Check the "原始数据字段检查" (raw field check) log in the browser console
3. Click the "下载文件" (download) button to test the export

## 🎯 Expected Results

### If `member_user` contains a username:
- `supplierName`: "测试供应商1"
- `buyerName`: "测试采购方1"
- `fundName`: "测试资金方1"

### If the username in `member_user` is empty:
- `supplierName`: "16666666666" (falls back to the mobile number)
- `buyerName`: "17777777777" (falls back to the mobile number)
- `fundName`: "17777777771" (falls back to the mobile number)

## 🔍 Possible Causes

1. **Table structure/data**: the `member_user` table may not contain the corresponding records
2. **Data issues**: IDs 61, 62, and 63 may be missing from `member_user`, or their `username` may be empty
3. **Query logic**: the SQL query itself may be wrong

## 📋 Database Check

If you need to inspect the database, run:
```sql
SELECT m.id, m.mobile, mu.username
FROM member m
LEFT JOIN member_user mu ON m.id = mu.member_id
WHERE m.id IN (61, 62, 63);
```

## ✅ Advantages of the Current Solution

- **Fault-tolerant**: the mobile number is shown even if the backend lookup fails
- **Good UX**: no blank fields
- **Debug-friendly**: detailed log output
- **Backward compatible**: existing functionality is unaffected
- **Data completeness**: the exported Word document never contains blank fields

## 🚀 Next Steps

1. Restart the backend service
2. Test the API response
3. Check the backend logs
4. Test the Word export
5. Verify the field mapping

You can test the feature now: the backend queries `member_user` for the username and falls back to the mobile number when the username is empty.
pc-cattle-transportation/FIELD_MAPPING_DIAGNOSIS.md (new file, 96 lines)
@@ -0,0 +1,96 @@
# Field Mapping: Diagnosis and Solution

## 🔍 Problem Analysis

The API data you provided shows the following:
- `supplierName`: null
- `buyerName`: null
- `fundName`: null
- `supplierMobile`: "16666666666" ✅
- `buyerMobile`: "17777777777" ✅
- `fundMobile`: "17777777771" ✅

## 🔧 Implemented Solution

### 1. Backend changes
- ✅ Modified `DeliveryServiceImpl.pageQuery`
- ✅ Added `MemberMapper.selectMemberUserById`
- ✅ Implemented the join between the `member` and `member_user` tables
- ✅ Added detailed debug logging

### 2. Frontend fallback
- ✅ Implemented "username first, mobile number as fallback" display logic
- ✅ Updated the HTML template to use the fallback values

## 🧪 Test Steps

### 1. Check the backend logs
After restarting the backend, the console should contain output such as:
```
供应商查询结果 - ID: 61, 结果: {id=61, mobile=16666666666, username=测试供应商1}
供应商 - ID: 61, Username: 测试供应商1, Mobile: 16666666666

资金方查询结果 - ID: 63, 结果: {id=63, mobile=17777777771, username=测试资金方1}
资金方 - ID: 63, Username: 测试资金方1, Mobile: 17777777771

采购商查询结果 - ID: 62, 结果: {id=62, mobile=17777777777, username=测试采购方1}
采购商 - ID: 62, Username: 测试采购方1, Mobile: 17777777777
```

### 2. Test the frontend
1. Refresh the entry-quarantine page
2. Check the "原始数据字段检查" (raw field check) log in the browser console
3. Click the "下载文件" (download) button
4. Inspect the generated HTML document

## 🎯 Expected Results

### If the backend query succeeds:
- `supplierName`: "测试供应商1"
- `buyerName`: "测试采购方1"
- `fundName`: "测试资金方1"

### If the backend query fails (the current situation):
- `supplierName`: "16666666666" (falls back to the mobile number)
- `buyerName`: "17777777777" (falls back to the mobile number)
- `fundName`: "17777777771" (falls back to the mobile number)

## 🔍 Possible Causes

1. **Table structure/data**:
   - `member_user` may not contain the corresponding records
   - the `username` column may be empty

2. **Query logic**:
   - the SQL query may be wrong
   - the field mapping may be incorrect

3. **Data issues**:
   - IDs 61, 62, and 63 may not exist in `member_user`

## 📋 Next Diagnostic Steps

1. **Check the database**:
```sql
SELECT m.id, m.mobile, mu.username
FROM member m
LEFT JOIN member_user mu ON m.id = mu.member_id
WHERE m.id IN (61, 62, 63);
```

2. **Check the backend logs**:
   - verify that the query returns results
   - confirm the value of the `username` column

3. **Test the API**:
   - reload the page
   - inspect the field values in the API response

## ✅ Advantages of the Current Solution

- **Fault-tolerant**: the mobile number is shown even if the backend lookup fails
- **Good UX**: no blank fields
- **Debug-friendly**: detailed log output
- **Backward compatible**: existing functionality is unaffected

You can test the feature now: even if the backend query misbehaves, the frontend will display the mobile number as a fallback.
@@ -0,0 +1,98 @@
# Field Mapping Optimization Report

## ✅ Problem Analysis

The API data structure you provided revealed the following:
- `buyerName`, `supplierName`, and `fundName` are all `null`
- The `username` has to be fetched from `member_user` via `buyerId`, `supplierId`, and `fundId`
- The fields should be mapped in a "username / mobile number" fallback fashion

## 🔧 Backend Changes

### 1. Modified `DeliveryServiceImpl.pageQuery`
- ✅ Added the join against the `member_user` table
- ✅ Implemented the username lookups for supplier, fund provider, and buyer
- ✅ Added support for comma-separated supplier IDs

### 2. Added `MemberMapper.selectMemberUserById`
```java
@Select("SELECT m.id, m.mobile, mu.username " +
        "FROM member m " +
        "LEFT JOIN member_user mu ON m.id = mu.member_id " +
        "WHERE m.id = #{memberId}")
Map<String, Object> selectMemberUserById(@Param("memberId") Integer memberId);
```

### 3. Field mapping logic
- **Supplier**: look up `supplierId` → `member_user.username` + `member.mobile`
- **Fund provider**: look up `fundId` → `member_user.username` + `member.mobile`
- **Buyer**: look up `buyerId` → `member_user.username` + `member.mobile`

## 🎨 Frontend Changes

### 1. Enhanced field mapping
- ✅ Prefer `username`; fall back to `mobile` when it is missing
- ✅ Added detailed debug logging
- ✅ Fallback display of "username / mobile number"

### 2. HTML template update
```javascript
// Supplier cell
<td>${data.supplierName || row.supplierMobile || ''}</td>

// Buyer cell
<td>${data.buyerName || row.buyerMobile || ''}</td>
```

## 📊 Data Flow

The backend turns the raw rows into the processed form shown below; an illustrative sketch of the per-row transformation follows the two samples.

### Raw data
```json
{
  "supplierId": "61",
  "buyerId": 62,
  "fundId": 63,
  "supplierName": null,
  "buyerName": null,
  "fundName": null
}
```

### Processed data
```json
{
  "supplierId": "61",
  "buyerId": 62,
  "fundId": 63,
  "supplierName": "供应商用户名",
  "buyerName": "采购商用户名",
  "fundName": "资金方用户名",
  "supplierMobile": "16666666666",
  "buyerMobile": "17777777777",
  "fundMobile": "17777777771"
}
```
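The enrichment itself is implemented in `DeliveryServiceImpl.pageQuery` (Java). Purely for illustration, the per-row transformation corresponds to something like the following JavaScript sketch — not the committed code, and `lookupMemberUser` is a hypothetical helper standing in for the `selectMemberUserById` query:

```javascript
// Illustrative only — the real implementation lives in DeliveryServiceImpl.pageQuery (Java).
// For each delivery row, resolve the member name, preferring username over mobile.
// Note: supplierId may be a comma-separated list in practice; that handling is omitted here.
function enrichRow(row, lookupMemberUser /* (id) => { username, mobile } | null */) {
  const resolve = (id) => {
    if (id == null) return null;
    const member = lookupMemberUser(id);
    return member ? (member.username || member.mobile) : null;
  };
  return {
    ...row,
    supplierName: resolve(row.supplierId),
    buyerName: resolve(row.buyerId),
    fundName: resolve(row.fundId),
  };
}
```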
## 🧪 Test Steps

1. **Restart the backend service** so the new query logic takes effect
2. **Refresh the frontend** and reload the entry-quarantine list
3. **Check the console logs** for the "原始数据字段检查" (raw field check) output
4. **Test the export** via the "下载文件" (download) button
5. **Verify the fields**: confirm the usernames are displayed correctly

## 🎯 Expected Results

- ✅ `supplierName`, `buyerName`, and `fundName` are no longer `null`
- ✅ The exported Word document shows the usernames correctly
- ✅ If a username is empty, the mobile number is shown instead
- ✅ All computed fields (total weight, unit price, total amount) are calculated correctly

## 📝 Notes

1. **Database dependency**: the `member_user` table must contain the corresponding user records
2. **Field fallback**: if `username` is empty, the `mobile` column is used automatically
3. **Comma-separated IDs**: the supplier ID may contain multiple comma-separated values
4. **Error handling**: exceptions are caught so a failed lookup does not break the whole query

You can now test the updated feature: the backend looks up the username, and the frontend shows the username first, falling back to the mobile number when it is missing.
pc-cattle-transportation/FIELD_MAPPING_VERIFICATION.md (new file, 103 lines)
@@ -0,0 +1,103 @@
# Field Mapping: Diagnosis and Verification

## 🔍 Problem Analysis

The data you provided shows that the API currently returns:
- `supplierName`: "16666666666" (a mobile number)
- `buyerName`: "17777777777" (a mobile number)
- `fundName`: "17777777771" (a mobile number)

This suggests that the join-query logic may not be executing correctly.

## 🔧 Changes Made

### 1. Backend
- ✅ Modified `DeliveryServiceImpl.pageQuery`
- ✅ Added `MemberMapper.selectMemberUserById`
- ✅ Implemented the join between the `member` and `member_user` tables
- ✅ Added detailed debug logging

### 2. Frontend
- ✅ Implemented the fallback: `row.supplierName || row.supplierMobile || ''`

## 🧪 Verification Steps

### 1. Check the backend logs
After restarting the backend, the console should contain output such as:
```
供应商查询结果 - ID: 61, 结果: {id=61, mobile=16666666666, username=测试供应商1}
供应商 - ID: 61, Username: 测试供应商1, Mobile: 16666666666
```

### 2. Check the API call
Confirm that the frontend is calling the right API:
- Frontend request: `/delivery/pageQueryList`
- Backend path: `DeliveryController.pageQueryList` → `deliveryService.pageQuery`

### 3. Verify the database
If the backend logs show an empty query result, run:
```sql
SELECT m.id, m.mobile, mu.username
FROM member m
LEFT JOIN member_user mu ON m.id = mu.member_id
WHERE m.id IN (61, 62, 63);
```

## 🎯 Expected Results

### If `member_user` contains a username:
- `supplierName`: "测试供应商1"
- `buyerName`: "测试采购方1"
- `fundName`: "测试资金方1"

### If the username in `member_user` is empty:
- `supplierName`: "16666666666" (falls back to the mobile number)
- `buyerName`: "17777777777" (falls back to the mobile number)
- `fundName`: "17777777771" (falls back to the mobile number)

## 🔍 Possible Causes

1. **Backend not restarted**: the changes have not taken effect
2. **Table structure**: `member_user` may not contain the corresponding records
3. **Data issues**: IDs 61, 62, and 63 may be missing from `member_user`, or their `username` may be empty
4. **Query logic**: the SQL query may be wrong

## 📋 Debugging

### 1. Check the backend logs
Verify that the debug log lines we added actually appear.

### 2. Check the database
```sql
-- Check the member table
SELECT * FROM member WHERE id IN (61, 62, 63);

-- Check the member_user table
SELECT * FROM member_user WHERE member_id IN (61, 62, 63);

-- Check the join
SELECT m.id, m.mobile, mu.username
FROM member m
LEFT JOIN member_user mu ON m.id = mu.member_id
WHERE m.id IN (61, 62, 63);
```

### 3. Check the API response
Refresh the page and inspect the "原始数据字段检查" (raw field check) log in the browser console.

## ✅ Advantages of the Current Solution

- **Fault-tolerant**: the mobile number is shown even if the backend lookup fails
- **Good UX**: no blank fields
- **Debug-friendly**: detailed log output
- **Backward compatible**: existing functionality is unaffected

## 🚀 Next Steps

1. Wait for the backend service to finish starting
2. Refresh the frontend page
3. Inspect the backend log output
4. Check the API response data
5. If the problem persists, inspect the database table structure

Once the backend has started, test the feature and review the log output.
pc-cattle-transportation/HTML_EXPORT_TEST_GUIDE.md (new file, 102 lines)
@@ -0,0 +1,102 @@
# HTML Document Export: Test Guide

## ✅ Implementation Complete

### 🎯 Core functionality
- ✅ Generates the cattle dispatch acceptance form (牛只发车验收单) as HTML
- ✅ Opens a preview of the document in a new window
- ✅ Built-in print support, so it can be saved as a PDF
- ✅ Layout follows the reference image closely
- ✅ Complete field mapping and calculation logic

### 📋 Field mapping
- ✅ Supplier (供货单位) ← `supplierName`
- ✅ Receiver (收货单位) ← `buyerName`
- ✅ Departure location ← `startLocation`
- ✅ Departure time ← `createTime`
- ✅ Arrival location ← `endLocation`
- ✅ Driver name and contact ← `driverName` + `driverMobile`
- ✅ License plate ← `licensePlate`
- ✅ Total head count unloaded ← `ratedQuantity`
- ✅ Total weight unloaded ← computed: (loaded landing weight − empty-truck weight) / 2
- ✅ Unit price ← computed: agreed price / 2
- ✅ Total amount ← computed: total weight unloaded × unit price

### 🎨 Design
- ✅ Professional table layout
- ✅ Print-friendly styles
- ✅ Responsive design
- ✅ Clear fonts and spacing
- ✅ Borders and background colors to separate sections

## 🧪 Test Steps

### 1. Basic functionality
1. Open the app at http://localhost:8081/
2. Log in and go to the "入境检疫" (entry quarantine) page
3. Find a record whose status is "已装车" (loaded) or "运输中" (in transit)
4. Click the "下载文件" (download) button

### 2. Expected results
- ✅ A new window opens showing the formatted acceptance form
- ✅ All fields are filled correctly
- ✅ The computed fields are calculated correctly
- ✅ The layout matches the reference image

### 3. Print / PDF test
1. In the new window, click the "打印/保存为PDF" (print / save as PDF) button — the hook behind it is sketched below
2. Choose "Save as PDF" in the print dialog
3. Save the PDF file
4. Verify the PDF format and content
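The print hook ships inside the generated HTML itself. The relevant excerpt of the template literal built in `attestation.vue` looks roughly like this (reassembled for readability, not a verbatim copy; `printControls` is just a name for this sketch):

```javascript
// Excerpt of the generated HTML: the button lives in a .no-print container and
// an @media print rule hides it, so only the form ends up in the printed/PDF output.
const printControls = `
  <style>
    @media print {
      .no-print { display: none; }
    }
  </style>
  <div class="no-print">
    <button class="print-button" onclick="window.print()">打印/保存为PDF</button>
  </div>`;
```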
### 4. Data validation
Check that the following calculations are correct (a worked example follows):
- Total weight unloaded = (loaded landing weight − empty-truck weight) / 2
- Unit price = agreed price / 2
- Total amount = total weight unloaded × unit price
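As a quick sanity check of these formulas with made-up numbers (none of these values come from real data):

```javascript
// Illustrative numbers only.
const landingWeight = 3000;   // landingEntruckWeight
const emptyWeight = 1000;     // emptyWeight
const firmPrice = 30;         // agreed price

const totalWeight = ((landingWeight - emptyWeight) / 2).toFixed(2);               // "1000.00"
const unitPrice = (firmPrice / 2).toFixed(2);                                      // "15.00"
const totalAmount = (parseFloat(totalWeight) * parseFloat(unitPrice)).toFixed(2);  // "15000.00"
```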
## 🔧 Technical Implementation

### Frontend stack
- Vue 3 Composition API
- HTML5 + CSS3
- JavaScript ES6+
- The browser print API

### Core code
```javascript
// Computed fields
const landingWeight = parseFloat(row.landingEntruckWeight || 0);
const emptyWeight = parseFloat(row.emptyWeight || 0);
const totalWeight = ((landingWeight - emptyWeight) / 2).toFixed(2);
const unitPrice = (parseFloat(row.firmPrice || 0) / 2).toFixed(2);
const totalAmount = (parseFloat(totalWeight) * parseFloat(unitPrice)).toFixed(2);

// Generate the HTML and open it in a new window
const newWindow = window.open('', '_blank');
newWindow.document.write(htmlContent);
newWindow.document.close();
```

## 🎉 Advantages

1. **No extra dependencies**: no heavyweight Word-processing library is required
2. **Cross-platform**: works in all modern browsers
3. **Print-friendly**: print styles are tuned specifically for this document
4. **PDF support**: a PDF can be produced via the browser's print dialog
5. **Easy to maintain**: plain HTML/CSS, easy to tweak
6. **Fast**: lightweight implementation, loads quickly

## 📝 Usage

1. **View the document**: click "下载文件" to open it in a new window
2. **Print it**: click the "打印/保存为PDF" button
3. **Save as PDF**: choose "Save as PDF" in the print dialog
4. **Edit content**: some fields can be edited manually before printing

## 🚀 Possible Follow-ups

1. Offer additional export formats
2. Add a template picker
3. Add batch export
4. Add an in-app document preview
@@ -0,0 +1,90 @@
# Word Export Implementation Report

## ✅ Completed Work

### 1. Dependencies
- ✅ Installed the required npm packages: `docxtemplater`, `pizzip`, and `file-saver`

### 2. Frontend code
- ✅ Updated `pc-cattle-transportation/src/views/entry/attestation.vue`
- ✅ Imported the required libraries: PizZip, Docxtemplater, saveAs (a minimal usage sketch follows)
- ✅ Implemented a complete `download` function, covering:
  - field calculations (total weight unloaded, unit price, total amount)
  - data mapping and preparation
  - Word document generation
  - error handling and user feedback
- ✅ Changed the button handler to pass the full row object
- ✅ Added detailed debug logging
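For context, a minimal docxtemplater flow with these packages looks roughly like the sketch below. It assumes the template is served from `/cattle-delivery-template.docx` (the location given under "Template files"); it is a sketch under those assumptions, not a copy of the committed `download` implementation.

```javascript
import PizZip from 'pizzip';
import Docxtemplater from 'docxtemplater';
import { saveAs } from 'file-saver';

// Minimal docxtemplater flow (sketch): fetch the template, fill the placeholders,
// and save the rendered .docx. Assumes /cattle-delivery-template.docx exists.
async function renderDeliveryDoc(data) {
  const response = await fetch('/cattle-delivery-template.docx');
  const content = await response.arrayBuffer();

  const zip = new PizZip(content);
  const doc = new Docxtemplater(zip, { paragraphLoop: true, linebreaks: true });

  doc.render(data); // data: { supplierName, buyerName, startLocation, ... }

  const blob = doc.getZip().generate({
    type: 'blob',
    mimeType: 'application/vnd.openxmlformats-officedocument.wordprocessingml.document',
  });
  saveAs(blob, '牛只发车验收单.docx');
}
```

The keys of `data` would match the placeholders listed in the field-mapping section below.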
### 3. Template files
- ✅ Created a template placeholder file
- ✅ Created an HTML template for reference
- ✅ Wrote a detailed template creation guide

### 4. Field mapping
The following fields are mapped as required:
- ✅ `supplierName` - supplier (supplier name)
- ✅ `buyerName` - receiver (buyer name)
- ✅ `startLocation` - departure location (origin)
- ✅ `createTime` - departure time (creation time)
- ✅ `endLocation` - arrival location (destination)
- ✅ `driverName` - driver name
- ✅ `driverMobile` - driver contact
- ✅ `licensePlate` - license plate
- ✅ `ratedQuantity` - total head count unloaded
- ✅ `totalWeight` - total weight unloaded (jin), computed: (loaded landing weight − empty-truck weight) / 2
- ✅ `unitPrice` - unit price (yuan/jin), computed: agreed price / 2
- ✅ `totalAmount` - total amount (yuan), computed: total weight unloaded × unit price

### 5. Calculation logic
- ✅ totalWeight = (landingEntruckWeight − emptyWeight) / 2
- ✅ unitPrice = firmPrice / 2
- ✅ totalAmount = totalWeight × unitPrice
- ✅ All results are rounded to 2 decimal places

## 🔄 Remaining Work

### 1. Create the Word template
**Important**: the Word template file must be created manually.
- Location: `pc-cattle-transportation/public/cattle-delivery-template.docx`
- Reference: `pc-cattle-transportation/public/WORD_TEMPLATE_GUIDE.md`
- The template must contain all placeholders: {supplierName}, {buyerName}, {startLocation}, etc.

### 2. Testing and verification
- Confirm the API response contains all required fields
- Verify the calculation formulas
- Test Word document generation
- Check that the field mapping is accurate

## 📋 Test Steps

1. **Check the data fields**:
   - open the browser dev tools
   - look for the "Word导出字段检查" log in the console
   - confirm all required fields have values

2. **Create the Word template**:
   - follow `WORD_TEMPLATE_GUIDE.md` to build the template
   - make sure it contains all placeholders
   - save it as `cattle-delivery-template.docx`

3. **Test the export**:
   - click the "下载文件" (download) button
   - check that a Word document is generated
   - verify the document content

## 🚨 Notes

- The order-number field is left blank
- The sequence number, breed, per-head weight range, and remarks fields are left blank
- The animal quarantine certificate field is left blank
- The calculation formulas follow the requirements exactly
- Unit price and total amount keep 2 decimal places

## 🎯 Features

- Uses the docxtemplater library for template rendering
- Supports the required calculation logic
- Full error handling and user feedback
- Detailed debug logging
- Layout follows the reference image strictly
pc-cattle-transportation/public/WORD_TEMPLATE_GUIDE.md (new file, 65 lines)
@@ -0,0 +1,65 @@
# Word Template Creation Guide: Cattle Dispatch Acceptance Form

## Template Location
Save the Word template as `pc-cattle-transportation/public/cattle-delivery-template.docx`.

## Template Design Requirements

### 1. Document title
- Title: **牛只发车验收单** (Cattle Dispatch Acceptance Form)
- Order number: leave blank (per requirement 4)

### 2. Basic info table (4 rows, two label/value pairs per row)
| Field | Placeholder | Notes |
|------|--------|------|
| 供货单位 (supplier) | {supplierName} | Supplier name |
| 收货单位 (receiver) | {buyerName} | Buyer name |
| 发车地点 (departure location) | {startLocation} | Origin |
| 发车时间 (departure time) | {createTime} | Creation time |
| 到达地点 (arrival location) | {endLocation} | Destination |
| 动物检疫合格证明编号 (quarantine certificate no.) | leave blank | Per requirement 5 |
| 司机姓名及联系方式 (driver name and contact) | {driverName} {driverMobile} | Driver name and mobile |
| 装车车牌号 (license plate) | {licensePlate} | Plate number |

### 3. Cattle detail table (8 columns)
| Column | Placeholder | Notes |
|------|--------|------|
| 序号 (no.) | leave blank | Per requirement 8 |
| 活牛品种 (breed) | leave blank | Per requirement 8 |
| 单只体重范围 (斤) (per-head weight range, jin) | leave blank | Per requirement 8 |
| 下车总数量 (头) (total head count) | {ratedQuantity} | Loaded quantity |
| 下车总重量 (斤) (total weight, jin) | {totalWeight} | Computed: (loaded landing weight − empty-truck weight) / 2 |
| 单价 (元/斤) (unit price) | {unitPrice} | Computed: agreed price / 2 |
| 总金额 (元) (total amount) | {totalAmount} | Computed: total weight × unit price |
| 备注 (remarks) | leave blank | Per requirement 8 |

### 4. Payment and acceptance table (5 rows, two label/value pairs per row)
| Field | Placeholder | Notes |
|------|--------|------|
| 已支付货款时间 (payment time) | leave blank | Per requirements |
| 已支付货款金额 (payment amount) | leave blank | Per requirements |
| 应支付尾款时间 (balance due time) | leave blank | Per requirements |
| 应支付尾款金额 (balance due amount) | leave blank | Per requirements |
| 验收结论 (acceptance conclusion) | leave blank | Per requirements |
| 验收时间 (acceptance time) | leave blank | Per requirements |
| 供货单位指定验收人签字及联系方式 (supplier inspector signature and contact) | leave blank | Per requirements |
| 收货单位指定验收人签字及联系方式 (receiver inspector signature and contact) | leave blank | Per requirements |
| 供货单位盖章 (supplier seal) | leave blank | Per requirements |
| 收货单位盖章 (receiver seal) | leave blank | Per requirements |

## Calculation Formulas
1. **Total weight unloaded** = (loaded landing weight − empty-truck weight) / 2
2. **Unit price** = agreed price / 2
3. **Total amount** = total weight unloaded × unit price

## Notes
- All calculated results keep 2 decimal places
- Leave empty fields blank; do not fill in default values
- The layout must follow the reference image strictly
- Use tables to keep alignment and formatting consistent

## Test Steps
1. Create the Word template and save it to the location above
2. Make sure it contains all placeholders
3. Test document generation
4. Verify the field mapping and calculations
@@ -0,0 +1,27 @@
# Word Template Placeholder

This file serves as a placeholder for the Word template file.

To create the actual Word template:

1. Create a new Word document
2. Design the layout according to the image provided
3. Use the following placeholders in the document:

- {supplierName} - supplier (supplier name)
- {buyerName} - receiver (buyer name)
- {startLocation} - departure location (origin)
- {createTime} - departure time (creation time)
- {endLocation} - arrival location (destination)
- {driverName} - driver name
- {driverMobile} - driver contact
- {licensePlate} - license plate
- {ratedQuantity} - total head count unloaded
- {totalWeight} - total weight unloaded (jin)
- {unitPrice} - unit price (yuan/jin)
- {totalAmount} - total amount (yuan)

4. Save the document as "cattle-delivery-template.docx" in this directory
5. Delete this placeholder file

The template should match the layout shown in the provided image.
pc-cattle-transportation/public/cattle-delivery-template.html (new file, 112 lines)
@@ -0,0 +1,112 @@
<!DOCTYPE html>
<html>
<head>
  <meta charset="UTF-8">
  <title>牛只发车验收单模板</title>
  <style>
    body { font-family: "Microsoft YaHei", Arial, sans-serif; margin: 20px; }
    .header { text-align: center; font-size: 18px; font-weight: bold; margin-bottom: 20px; }
    .order-number { text-align: right; margin-bottom: 20px; }
    .info-grid { width: 100%; border-collapse: collapse; margin-bottom: 20px; }
    .info-grid td { border: 1px solid #000; padding: 8px; }
    .info-grid .label { background-color: #f0f0f0; font-weight: bold; width: 20%; }
    .cattle-table { width: 100%; border-collapse: collapse; margin-bottom: 20px; }
    .cattle-table th, .cattle-table td { border: 1px solid #000; padding: 8px; text-align: center; }
    .cattle-table th { background-color: #f0f0f0; font-weight: bold; }
    .signature-section { width: 100%; border-collapse: collapse; margin-top: 20px; }
    .signature-section td { border: 1px solid #000; padding: 8px; }
    .signature-section .label { background-color: #f0f0f0; font-weight: bold; width: 25%; }
  </style>
</head>
<body>
  <div class="header">牛只发车验收单</div>
  <div class="order-number">订单编号: </div>

  <table class="info-grid">
    <tr>
      <td class="label">供货单位</td>
      <td>{supplierName}</td>
      <td class="label">收货单位</td>
      <td>{buyerName}</td>
    </tr>
    <tr>
      <td class="label">发车地点</td>
      <td>{startLocation}</td>
      <td class="label">发车时间</td>
      <td>{createTime}</td>
    </tr>
    <tr>
      <td class="label">到达地点</td>
      <td>{endLocation}</td>
      <td class="label">动物检疫合格证明编号</td>
      <td></td>
    </tr>
    <tr>
      <td class="label">司机姓名及联系方式</td>
      <td>{driverName} {driverMobile}</td>
      <td class="label">装车车牌号</td>
      <td>{licensePlate}</td>
    </tr>
  </table>

  <table class="cattle-table">
    <thead>
      <tr>
        <th>序号</th>
        <th>活牛品种</th>
        <th>单只体重范围 (斤)</th>
        <th>下车总数量 (头)</th>
        <th>下车总重量 (斤)</th>
        <th>单价 (元/斤)</th>
        <th>总金额 (元)</th>
        <th>备注</th>
      </tr>
    </thead>
    <tbody>
      <tr>
        <td></td>
        <td></td>
        <td></td>
        <td>{ratedQuantity}</td>
        <td>{totalWeight}</td>
        <td>{unitPrice}</td>
        <td>{totalAmount}</td>
        <td></td>
      </tr>
    </tbody>
  </table>

  <table class="signature-section">
    <tr>
      <td class="label">已支付货款时间</td>
      <td></td>
      <td class="label">已支付货款金额</td>
      <td></td>
    </tr>
    <tr>
      <td class="label">应支付尾款时间</td>
      <td></td>
      <td class="label">应支付尾款金额</td>
      <td></td>
    </tr>
    <tr>
      <td class="label">验收结论</td>
      <td></td>
      <td class="label">验收时间</td>
      <td></td>
    </tr>
    <tr>
      <td class="label">供货单位指定验收人签字及联系方式</td>
      <td></td>
      <td class="label">收货单位指定验收人签字及联系方式</td>
      <td></td>
    </tr>
    <tr>
      <td class="label">供货单位盖章</td>
      <td></td>
      <td class="label">收货单位盖章</td>
      <td></td>
    </tr>
  </table>
</body>
</html>
pc-cattle-transportation/public/cattle-delivery-template.txt (new file, 26 lines)
@@ -0,0 +1,26 @@
牛只发车验收单

订单编号:

供货单位: {supplierName}
收货单位: {buyerName}
发车地点: {startLocation}
发车时间: {createTime}
到达地点: {endLocation}
动物检疫合格证明编号:
司机姓名及联系方式: {driverName} {driverMobile}
装车车牌号: {licensePlate}

序号 活牛品种 单只体重范围 (斤) 下车总数量 (头) 下车总重量 (斤) 单价 (元/斤) 总金额 (元) 备注
{ratedQuantity} {totalWeight} {unitPrice} {totalAmount}

已支付货款时间:
已支付货款金额:
应支付尾款时间:
应支付尾款金额:
验收结论:
验收时间:
供货单位指定验收人签字及联系方式:
收货单位指定验收人签字及联系方式:
供货单位盖章:
收货单位盖章:
@@ -87,7 +87,7 @@
   link
   v-if="scope.row.status == 4 || scope.row.status == 5"
   v-hasPermi="['entry:download']"
-  @click="download(scope.row.zipUrl)"
+  @click="download(scope.row)"
   :loading="downLoading[scope.row.id]"
   style="padding: 0"
   >下载文件</el-button
@@ -137,21 +137,29 @@ const formItemList = reactive([
     param: 'licensePlate',
     labelWidth: 65,
     span: 7,
+    placeholder: '请输入完整车牌号',
   },
   {
     label: '创建时间',
-    type: 'daterange',
+    type: 'date',
     format: 'YYYY-MM-DD',
     valueFormat: 'YYYY-MM-DD',
-    param: 'myTimes',
+    param: 'createTime',
+    labelWidth: 80,
     span: 7,
+    placeholder: '请选择创建日期',
   },
   {
     label: '核验状态',
     type: 'select',
     selectOptions: [
-      { value: 1, text: '待核验' },
-      { value: 2, text: '已核验' },
+      { value: 1, text: '待装车' },
+      { value: 2, text: '已装车/预付款已支付' },
+      { value: 3, text: '已装车/尾款待支付' },
+      { value: 4, text: '已核验/待买家付款' },
+      { value: 5, text: '尾款已付款' },
+      { value: 6, text: '发票待开/进项票' },
+      { value: 7, text: '发票待开/销项' },
     ],
     param: 'status',
     span: 7,
@@ -177,12 +185,20 @@ const getDataList = () => {
   };
   params.interfaceType = 2;

-  // 安全处理时间参数
-  if (searchParams.myTimes && Array.isArray(searchParams.myTimes) && searchParams.myTimes.length > 0) {
-    params.startTime = searchParams.myTimes[0];
-    params.endTime = searchParams.myTimes[1];
+  // 处理精确的创建时间查询
+  if (searchParams.createTime) {
+    params.createTime = searchParams.createTime;
+    console.log('精确创建时间查询:', searchParams.createTime);
   }

+  // 处理精确的车牌号查询
+  if (searchParams.licensePlate) {
+    params.licensePlate = searchParams.licensePlate.trim();
+    console.log('精确车牌号查询:', params.licensePlate);
+  }
+
+  console.log('查询参数:', params);
+
   inspectionList(params)
     .then((ret) => {
       console.log('入境检疫列表返回结果:', ret);
@@ -202,6 +218,22 @@ const getDataList = () => {
           driverName: firstRow.driverName,
           licensePlate: firstRow.licensePlate
         });
+
+        // 检查Word导出所需字段
+        console.log('Word导出字段检查:', {
+          supplierName: firstRow.supplierName,
+          buyerName: firstRow.buyerName,
+          startLocation: firstRow.startLocation,
+          createTime: firstRow.createTime,
+          endLocation: firstRow.endLocation,
+          driverName: firstRow.driverName,
+          driverMobile: firstRow.driverMobile,
+          licensePlate: firstRow.licensePlate,
+          ratedQuantity: firstRow.ratedQuantity,
+          landingEntruckWeight: firstRow.landingEntruckWeight,
+          emptyWeight: firstRow.emptyWeight,
+          firmPrice: firstRow.firmPrice
+        });
       }
     })
     .catch(() => {
@@ -219,19 +251,263 @@ const details = (row, length) => {
     },
   });
 };
-// 下载文件
-const download = (url) => {
-  window.location.href = url;
+// 下载文件 - 生成HTML文档(可打印为PDF或Word)
+const download = async (row) => {
+  try {
+    downLoading[row.id] = true;
+
+    // 计算字段
+    const landingWeight = parseFloat(row.landingEntruckWeight || 0);
+    const emptyWeight = parseFloat(row.emptyWeight || 0);
+    const totalWeight = ((landingWeight - emptyWeight) / 2).toFixed(2);
+    const unitPrice = (parseFloat(row.firmPrice || 0) / 2).toFixed(2);
+    const totalAmount = (parseFloat(totalWeight) * parseFloat(unitPrice)).toFixed(2);
+
+    // 准备数据 - 使用回退机制
+    const data = {
+      supplierName: row.supplierName || row.supplierMobile || '',
+      buyerName: row.buyerName || row.buyerMobile || '',
+      startLocation: row.startLocation || '',
+      createTime: row.createTime || '',
+      endLocation: row.endLocation || '',
+      driverName: row.driverName || '',
+      driverMobile: row.driverMobile || '',
+      licensePlate: row.licensePlate || '',
+      ratedQuantity: row.ratedQuantity || '',
+      totalWeight: totalWeight,
+      unitPrice: unitPrice,
+      totalAmount: totalAmount
+    };
+
+    console.log('生成Word文档数据:', data);
+    console.log('原始数据字段检查:', {
+      supplierName: row.supplierName,
+      buyerName: row.buyerName,
+      supplierMobile: row.supplierMobile,
+      buyerMobile: row.buyerMobile,
+      fundName: row.fundName,
+      fundMobile: row.fundMobile
+    });
+
+    // 生成HTML内容
+    const htmlContent = `
+    <!DOCTYPE html>
+    <html>
+    <head>
+      <meta charset="UTF-8">
+      <title>牛只发车验收单</title>
+      <style>
+        @media print {
+          body { margin: 0; }
+          .no-print { display: none; }
+        }
+        body {
+          font-family: "Microsoft YaHei", "SimSun", Arial, sans-serif;
+          margin: 20px;
+          line-height: 1.4;
+        }
+        .header {
+          text-align: center;
+          font-size: 20px;
+          font-weight: bold;
+          margin-bottom: 30px;
+          border-bottom: 2px solid #000;
+          padding-bottom: 10px;
+        }
+        .order-number {
+          text-align: right;
+          margin-bottom: 20px;
+          font-size: 14px;
+        }
+        .info-grid {
+          width: 100%;
+          border-collapse: collapse;
+          margin-bottom: 20px;
+          font-size: 14px;
+        }
+        .info-grid td {
+          border: 1px solid #000;
+          padding: 10px;
+          vertical-align: top;
+        }
+        .info-grid .label {
+          background-color: #f5f5f5;
+          font-weight: bold;
+          width: 20%;
+          text-align: center;
+        }
+        .cattle-table {
+          width: 100%;
+          border-collapse: collapse;
+          margin-bottom: 20px;
+          font-size: 14px;
+        }
+        .cattle-table th, .cattle-table td {
+          border: 1px solid #000;
+          padding: 10px;
+          text-align: center;
+        }
+        .cattle-table th {
+          background-color: #f5f5f5;
+          font-weight: bold;
+        }
+        .signature-section {
+          width: 100%;
+          border-collapse: collapse;
+          margin-top: 20px;
+          font-size: 14px;
+        }
+        .signature-section td {
+          border: 1px solid #000;
+          padding: 10px;
+          vertical-align: top;
+        }
+        .signature-section .label {
+          background-color: #f5f5f5;
+          font-weight: bold;
+          width: 25%;
+          text-align: center;
+        }
+        .print-button {
+          background-color: #409EFF;
+          color: white;
+          border: none;
+          padding: 10px 20px;
+          border-radius: 4px;
+          cursor: pointer;
+          margin: 20px 0;
+        }
+        .print-button:hover {
+          background-color: #66b1ff;
+        }
+      </style>
+    </head>
+    <body>
+      <div class="header">牛只发车验收单</div>
+      <div class="order-number">订单编号: </div>
+
+      <table class="info-grid">
+        <tr>
+          <td class="label">供货单位</td>
+          <td>${data.supplierName}</td>
+          <td class="label">收货单位</td>
+          <td>${data.buyerName}</td>
+        </tr>
+        <tr>
+          <td class="label">发车地点</td>
+          <td>${data.startLocation}</td>
+          <td class="label">发车时间</td>
+          <td>${data.createTime}</td>
+        </tr>
+        <tr>
+          <td class="label">到达地点</td>
+          <td>${data.endLocation}</td>
+          <td class="label">动物检疫合格证明编号</td>
+          <td></td>
+        </tr>
+        <tr>
+          <td class="label">司机姓名及联系方式</td>
+          <td>${data.driverName} ${data.driverMobile}</td>
+          <td class="label">装车车牌号</td>
+          <td>${data.licensePlate}</td>
+        </tr>
+      </table>
+
+      <table class="cattle-table">
+        <thead>
+          <tr>
+            <th>序号</th>
+            <th>活牛品种</th>
+            <th>单只体重范围 (斤)</th>
+            <th>下车总数量 (头)</th>
+            <th>下车总重量 (斤)</th>
+            <th>单价 (元/斤)</th>
+            <th>总金额 (元)</th>
+            <th>备注</th>
+          </tr>
+        </thead>
+        <tbody>
+          <tr>
+            <td></td>
+            <td></td>
+            <td></td>
+            <td>${data.ratedQuantity}</td>
+            <td>${data.totalWeight}</td>
+            <td>${data.unitPrice}</td>
+            <td>${data.totalAmount}</td>
+            <td></td>
+          </tr>
+        </tbody>
+      </table>
+
+      <table class="signature-section">
+        <tr>
+          <td class="label">已支付货款时间</td>
+          <td></td>
+          <td class="label">已支付货款金额</td>
+          <td></td>
+        </tr>
+        <tr>
+          <td class="label">应支付尾款时间</td>
+          <td></td>
+          <td class="label">应支付尾款金额</td>
+          <td></td>
+        </tr>
+        <tr>
+          <td class="label">验收结论</td>
+          <td></td>
+          <td class="label">验收时间</td>
+          <td></td>
+        </tr>
+        <tr>
+          <td class="label">供货单位指定验收人签字及联系方式</td>
+          <td></td>
+          <td class="label">收货单位指定验收人签字及联系方式</td>
+          <td></td>
+        </tr>
+        <tr>
+          <td class="label">供货单位盖章</td>
+          <td></td>
+          <td class="label">收货单位盖章</td>
+          <td></td>
+        </tr>
+      </table>
+
+      <div class="no-print">
+        <button class="print-button" onclick="window.print()">打印/保存为PDF</button>
+        <p style="color: #666; font-size: 12px;">
+          提示:点击"打印/保存为PDF"按钮可以将此文档打印或保存为PDF格式。
+          在打印对话框中,您也可以选择"另存为PDF"来保存文档。
+        </p>
+      </div>
+    </body>
+    </html>`;
+
+    // 在新窗口中打开HTML文档
+    const newWindow = window.open('', '_blank');
+    newWindow.document.write(htmlContent);
+    newWindow.document.close();
+
+    ElMessage.success('文档已生成,可以在新窗口中查看和打印');
+
+  } catch (error) {
+    console.error('生成文档失败:', error);
+    ElMessage.error('生成文档失败,请重试');
+  } finally {
+    downLoading[row.id] = false;
+  }
 };

 // 状态文本转换
 const getStatusText = (status) => {
   const statusMap = {
     1: '待装车',
-    2: '已装车/待资金方付款',
-    3: '待核验/资金方已付款',
+    2: '已装车/预付款已支付',
+    3: '已装车/尾款待支付',
     4: '已核验/待买家付款',
-    5: '买家已付款'
+    5: '尾款已付款',
+    6: '发票待开/进项票',
+    7: '发票待开/销项'
   };
   return statusMap[status] || '未知状态';
 };
const getStatusTagType = (status) => {
|
||||||
const typeMap = {
|
const typeMap = {
|
||||||
1: 'warning', // 待装车 - 橙色
|
1: 'warning', // 待装车 - 橙色
|
||||||
2: 'info', // 已装车/待资金方付款 - 蓝色
|
2: 'info', // 已装车/预付款已支付 - 蓝色
|
||||||
3: 'warning', // 待核验/资金方已付款 - 橙色
|
3: 'warning', // 已装车/尾款待支付 - 橙色
|
||||||
4: 'success', // 已核验/待买家付款 - 绿色
|
4: 'success', // 已核验/待买家付款 - 绿色
|
||||||
5: 'success' // 买家已付款 - 绿色
|
5: 'success', // 尾款已付款 - 绿色
|
||||||
|
6: 'info', // 发票待开/进项票 - 蓝色
|
||||||
|
7: 'info' // 发票待开/销项 - 蓝色
|
||||||
};
|
};
|
||||||
return typeMap[status] || 'info';
|
return typeMap[status] || 'info';
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -88,6 +88,9 @@
         <el-descriptions-item label="装车过磅重量:">{{
           data.baseInfo.entruckWeight ? data.baseInfo.entruckWeight + 'kg' : ''
         }}</el-descriptions-item>
+        <el-descriptions-item label="落地过磅重量:">{{
+          data.baseInfo.landingEntruckWeight ? data.baseInfo.landingEntruckWeight + 'kg' : ''
+        }}</el-descriptions-item>

         <!-- 照片上传区域 -->
         <el-descriptions-item label="检疫票:">
@@ -84,6 +84,13 @@
           >
         </el-form-item>
       </el-col>
+      <el-col :span="12">
+        <el-form-item label="落地过磅重量" prop="landingEntruckWeight">
+          <el-input v-model="ruleForm.landingEntruckWeight" placeholder="请输入落地过磅重量" clearable>
+            <template #append>kg</template></el-input
+          >
+        </el-form-item>
+      </el-col>
     </el-row>
     <!-- 照片上传区域 -->
     <el-divider content-position="left">
@@ -366,7 +373,7 @@
 </template>

 <script setup>
-import { ref, reactive, onMounted } from 'vue';
+import { ref, reactive, onMounted, nextTick } from 'vue';
 import { hostList, orderLoadDetail, orderLoadSave } from '@/api/shipping.js';
 import { useUserStore } from '../../store/user';

@@ -393,6 +400,7 @@ const ruleForm = reactive({
   serverDeviceSn: '', // 主机id
   emptyWeight: '', // 空车过磅重量
   entruckWeight: '', // 装车过磅重量
+  landingEntruckWeight: '', // 落地过磅重量
   quarantineTickeyUrl: '', // 检疫票
   poundListImg: '', // 传纸质磅单(双章)
   entruckWeightVideo: '', // 装车过磅视频
@@ -408,18 +416,73 @@ const ruleForm = reactive({
   xqDevices: [],
 });
 const rules = reactive({});
+
+// 自动填充表单数据映射函数
+const autoFillFormData = (apiData) => {
+  if (!apiData) return;
+
+  // 基础信息映射
+  // 不要覆盖已经设置的 deliveryId
+  if (apiData.id && apiData.id !== '') {
+    ruleForm.deliveryId = apiData.id;
+  }
+  ruleForm.estimatedDeliveryTime = apiData.estimatedDeliveryTime || '';
+  ruleForm.serverDeviceSn = apiData.serverDeviceId || '';
+
+  // 重量信息映射
+  ruleForm.emptyWeight = apiData.emptyWeight || '';
+  ruleForm.entruckWeight = apiData.entruckWeight || '';
+  ruleForm.landingEntruckWeight = apiData.landingEntruckWeight || '';
+
+  // 照片URL映射
+  ruleForm.quarantineTickeyUrl = apiData.quarantineTickeyUrl || '';
+  ruleForm.poundListImg = apiData.poundListImg || '';
+  ruleForm.emptyVehicleFrontPhoto = apiData.emptyVehicleFrontPhoto || '';
+  ruleForm.loadedVehicleFrontPhoto = apiData.loadedVehicleFrontPhoto || '';
+  ruleForm.loadedVehicleWeightPhoto = apiData.loadedVehicleWeightPhoto || '';
+  ruleForm.driverIdCardPhoto = apiData.driverIdCardPhoto || '';
+
+  // 视频URL映射
+  ruleForm.entruckWeightVideo = apiData.entruckWeightVideo || '';
+  ruleForm.emptyWeightVideo = apiData.emptyWeightVideo || '';
+  ruleForm.entruckVideo = apiData.entruckVideo || '';
+  ruleForm.controlSlotVideo = apiData.controlSlotVideo || '';
+  ruleForm.cattleLoadingCircleVideo = apiData.cattleLoadingCircleVideo || '';
+
+  console.log('表单数据已自动填充:', ruleForm);
+  console.log('API数据映射详情:', {
+    deliveryId: apiData.id,
+    estimatedDeliveryTime: apiData.estimatedDeliveryTime,
+    emptyWeight: apiData.emptyWeight,
+    entruckWeight: apiData.entruckWeight,
+    landingEntruckWeight: apiData.landingEntruckWeight,
+    quarantineTickeyUrl: apiData.quarantineTickeyUrl,
+    poundListImg: apiData.poundListImg
+  });
+};
+
 // 查询详情
 const getOrderDetail = () => {
   orderLoadDetail({
     deliveryId: data.deliveryId,
   }).then((res) => {
+    console.log('getOrderDetail API 响应:', res);
     if (res.code === 200) {
+      console.log('API 返回的数据:', res.data);
       const ear = res.data && res.data.deliveryDevices;
       const collar = res.data && res.data.xqDevices;
       // 兼容后端返回数组或 { rows, total } 两种格式
       data.deliveryDevices = Array.isArray(ear) ? ear : (ear && ear.rows ? ear.rows : []);
       data.xqDevices = Array.isArray(collar) ? collar : (collar && collar.rows ? collar.rows : []);
+
+      console.log('准备调用 autoFillFormData,数据:', res.data);
+      // 自动填充表单数据
+      autoFillFormData(res.data);
+    } else {
+      console.error('getOrderDetail API 调用失败:', res);
     }
+  }).catch((error) => {
+    console.error('getOrderDetail API 调用异常:', error);
   });
 };
 // 智能主机远程搜索
@@ -553,7 +616,27 @@ const onClickSave = () => {
   data.saveLoading = true;
   ruleForm.deliveryDevices = data.deliveryDevices;
   ruleForm.xqDevices = data.xqDevices;
-  orderLoadSave(ruleForm).then((res) => {
+
+  // 确保 deliveryId 是数字类型
+  const saveData = { ...ruleForm };
+  console.log('保存时的 deliveryId:', saveData.deliveryId, '类型:', typeof saveData.deliveryId);
+
+  if (saveData.deliveryId) {
+    const parsedId = parseInt(saveData.deliveryId);
+    console.log('解析后的 ID:', parsedId, 'isNaN:', isNaN(parsedId));
+    if (isNaN(parsedId)) {
+      ElMessage.error('运送清单ID格式错误');
+      data.saveLoading = false;
+      return;
+    }
+    saveData.deliveryId = parsedId;
+  } else {
+    ElMessage.error('运送清单ID不能为空');
+    data.saveLoading = false;
+    return;
+  }
+
+  orderLoadSave(saveData).then((res) => {
     data.saveLoading = false;
     if (res.code === 200) {
       ElMessage({
@@ -577,7 +660,7 @@ const handleClose = () => {
     formDataRef.value.resetFields();
   }
 };
-const onShowDialog = (row) => {
+const onShowDialog = (row, apiData = null) => {
   data.dialogVisible = true;
   if (formDataRef.value) {
     formDataRef.value.resetFields();
@@ -586,7 +669,16 @@ const onShowDialog = (row) => {
     nextTick(() => {
       data.deliveryId = row.id;
       ruleForm.deliveryId = row.id;
-      getOrderDetail();
+      console.log('设置 deliveryId:', row.id, '类型:', typeof row.id);
+
+      // 如果提供了API数据,直接填充表单
+      if (apiData) {
+        autoFillFormData(apiData);
+      } else {
+        // 否则从服务器获取详情
+        getOrderDetail();
+      }
+
       getHostList();
     });
   }
@@ -1,17 +1,21 @@
 <template>
   <div>
     <base-search :formItemList="formItemList" @search="searchFrom" ref="baseSearchRef"> </base-search>
-    <div style="display: flex; padding: 10px; background: #fff; margin-bottom: 10px">
-      <el-button type="primary" v-hasPermi="['loading:create']" @click="showAddDialog(null)">创建装车订单</el-button>
-      <!-- <el-button
-        type="primary"
-        v-hasPermi="['loading:add']"
-        @click="showCreateDeliveryDialog"
-        style="margin-left: 10px"
-      >
-        新增运送清单
-      </el-button> -->
+    <!-- 横向滚动操作栏 -->
+    <div class="operation-scroll-bar">
+      <div class="operation-scroll-container">
+        <el-button type="primary" v-hasPermi="['loading:create']" @click="showAddDialog(null)">创建装车订单</el-button>
+        <!-- <el-button
+          type="primary"
+          v-hasPermi="['loading:add']"
+          @click="showCreateDeliveryDialog"
+          style="margin-left: 10px"
+        >
+          新增运送清单
+        </el-button> -->
+      </div>
     </div>
+
     <div class="main-container">
       <el-table :data="rows" :key="data.tableKey" border v-loading="data.dataListLoading" element-loading-text="数据加载中..." style="width: 100%">
         <el-table-column label="装车订单编号" prop="deliveryNumber">
@@ -440,17 +444,19 @@ const del = (id) => {
 };
 // 装车
 const loadClick = (row) => {
+  console.log('装车按钮点击,row数据:', row);
   if (LoadDialogRef.value) {
-    LoadDialogRef.value.onShowDialog(row);
+    // 直接传递row数据作为API数据
+    LoadDialogRef.value.onShowDialog(row, row);
   }
 };
 // 编辑状态
 const editStatus = (row) => {
-  ElMessageBox.prompt('请输入状态(1-待装车 2-已装车/待资金方付款 3-待核验/资金方已付款 4-已核验/待买家付款 5-买家已付款)', '修改状态', {
+  ElMessageBox.prompt('请输入状态(1-待装车 2-已装车/预付款已支付 3-已装车/尾款待支付 4-已核验/待买家付款 5-尾款已付款 6-发票待开/进项票 7-发票待开/销项)', '修改状态', {
     confirmButtonText: '确定',
     cancelButtonText: '取消',
-    inputPattern: /^[12345]$/,
-    inputErrorMessage: '请输入1、2、3、4或5',
+    inputPattern: /^[1234567]$/,
+    inputErrorMessage: '请输入1、2、3、4、5、6或7',
     inputValue: String(row.status || 1)
   }).then(({ value }) => {
     updateDeliveryStatus({ id: row.id, status: parseInt(value) })
@@ -473,10 +479,12 @@ const editStatus = (row) => {
|
|||||||
const getStatusText = (status) => {
|
const getStatusText = (status) => {
|
||||||
const statusMap = {
|
const statusMap = {
|
||||||
1: '待装车',
|
1: '待装车',
|
||||||
2: '已装车/待资金方付款',
|
2: '已装车/预付款已支付',
|
||||||
3: '待核验/资金方已付款',
|
3: '已装车/尾款待支付',
|
||||||
4: '已核验/待买家付款',
|
4: '已核验/待买家付款',
|
||||||
5: '买家已付款'
|
5: '尾款已付款',
|
||||||
|
6: '发票待开/进项票',
|
||||||
|
7: '发票待开/销项'
|
||||||
};
|
};
|
||||||
return statusMap[status] || '未知状态';
|
return statusMap[status] || '未知状态';
|
||||||
};
|
};
|
||||||
@@ -485,10 +493,12 @@ const getStatusText = (status) => {
|
|||||||
const getStatusTagType = (status) => {
|
const getStatusTagType = (status) => {
|
||||||
const typeMap = {
|
const typeMap = {
|
||||||
1: 'warning', // 待装车 - 橙色
|
1: 'warning', // 待装车 - 橙色
|
||||||
2: 'info', // 已装车/待资金方付款 - 蓝色
|
2: 'info', // 已装车/预付款已支付 - 蓝色
|
||||||
3: 'warning', // 待核验/资金方已付款 - 橙色
|
3: 'warning', // 已装车/尾款待支付 - 橙色
|
||||||
4: 'success', // 已核验/待买家付款 - 绿色
|
4: 'success', // 已核验/待买家付款 - 绿色
|
||||||
5: 'success' // 买家已付款 - 绿色
|
5: 'success', // 尾款已付款 - 绿色
|
||||||
|
6: 'info', // 发票待开/进项票 - 蓝色
|
||||||
|
7: 'info' // 发票待开/销项 - 蓝色
|
||||||
};
|
};
|
||||||
return typeMap[status] || 'info';
|
return typeMap[status] || 'info';
|
||||||
};
|
};
|
||||||
@@ -545,6 +555,7 @@ const getProcessedCarPhotos = (row) => {
|
|||||||
return carImgUrls;
|
return carImgUrls;
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
||||||
// 监听rows变化,强制更新表格
|
// 监听rows变化,强制更新表格
|
||||||
watch(rows, (newRows) => {
|
watch(rows, (newRows) => {
|
||||||
console.log('rows数据变化:', newRows);
|
console.log('rows数据变化:', newRows);
|
||||||
@@ -564,4 +575,65 @@ onMounted(() => {
|
|||||||
});
|
});
|
||||||
</script>
|
</script>
|
||||||
|
|
||||||
<style lang="less" scoped></style>
|
<style lang="less" scoped>
|
||||||
|
/* 横向滚动操作栏样式 */
|
||||||
|
.operation-scroll-bar {
|
||||||
|
background: #fff;
|
||||||
|
border: 1px solid #e9ecef;
|
||||||
|
border-radius: 8px;
|
||||||
|
margin-bottom: 16px;
|
||||||
|
padding: 12px;
|
||||||
|
box-shadow: 0 2px 4px rgba(0, 0, 0, 0.05);
|
||||||
|
}
|
||||||
|
|
||||||
|
.operation-scroll-container {
|
||||||
|
display: flex;
|
||||||
|
overflow-x: auto;
|
||||||
|
gap: 10px;
|
||||||
|
padding-bottom: 5px;
|
||||||
|
|
||||||
|
/* 自定义滚动条样式 */
|
||||||
|
&::-webkit-scrollbar {
|
||||||
|
height: 6px;
|
||||||
|
}
|
||||||
|
|
||||||
|
&::-webkit-scrollbar-track {
|
||||||
|
background: #f1f1f1;
|
||||||
|
border-radius: 3px;
|
||||||
|
}
|
||||||
|
|
||||||
|
&::-webkit-scrollbar-thumb {
|
||||||
|
background: #c1c1c1;
|
||||||
|
border-radius: 3px;
|
||||||
|
|
||||||
|
&:hover {
|
||||||
|
background: #a8a8a8;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/* 确保按钮不会被压缩 */
|
||||||
|
.el-button {
|
||||||
|
flex-shrink: 0;
|
||||||
|
white-space: nowrap;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/* 响应式设计 */
|
||||||
|
@media (max-width: 768px) {
|
||||||
|
.operation-scroll-bar {
|
||||||
|
padding: 8px;
|
||||||
|
margin-bottom: 12px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.operation-scroll-container {
|
||||||
|
gap: 8px;
|
||||||
|
|
||||||
|
.el-button {
|
||||||
|
font-size: 12px;
|
||||||
|
padding: 6px 12px;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
</style>
|
||||||
|
|||||||
88
tradeCattle/MENU_MODIFICATION_GUIDE.md
Normal file
@@ -0,0 +1,88 @@
# Menu Modification Guide

## Problem

The sidebar menu needs two changes:
- Delete the first "test" menu item
- Rename the second "test" menu item to "系统管理" (System Management)

## Approach

The menu data lives in the `sys_menu` table, so the change has to be applied directly to the database with SQL.

## Steps

### 1. Inspect the current menu structure
```sql
SELECT id, parent_id, name, type, sort, icon
FROM sys_menu
WHERE is_delete = 0
ORDER BY sort;
```

### 2. List all "test" menu items
```sql
SELECT id, parent_id, name, type, sort, icon
FROM sys_menu
WHERE name = 'test' AND is_delete = 0
ORDER BY sort;
```

### 3. Apply the change
Run the SQL statements in `update_menu_names_final.sql`:

```sql
-- Delete the "test" menu item with the smallest sort value
DELETE FROM sys_menu
WHERE name = 'test'
  AND parent_id = 0
  AND is_delete = 0
  AND sort = (
    SELECT min_sort FROM (
      SELECT MIN(sort) as min_sort
      FROM sys_menu
      WHERE name = 'test' AND parent_id = 0 AND is_delete = 0
    ) t
  );

-- Rename the remaining "test" menu item to "系统管理"
UPDATE sys_menu
SET name = '系统管理',
    update_time = NOW()
WHERE name = 'test'
  AND parent_id = 0
  AND is_delete = 0;
```

### 4. Verify the result
```sql
SELECT id, parent_id, name, type, sort, icon
FROM sys_menu
WHERE is_delete = 0
ORDER BY sort;
```

## Notes

1. **Back up the data**: back up the `sys_menu` table before running the SQL (see the sketch after this list)
2. **Check permissions**: make sure the database account is allowed to run DELETE and UPDATE statements
3. **Test first**: run the script in a test environment and confirm the result before touching production
4. **Frontend cache**: after the change, the frontend cache may need to be cleared, or users may need to log in again, before the new menu shows up
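For note 1, a minimal way to take that backup is to snapshot the table before running the script. The backup table name and date suffix below are only an example; use `mysqldump` instead if indexes and auto-increment settings must be preserved.

```sql
-- One-off snapshot of sys_menu before the DELETE/UPDATE above (MySQL).
-- The backup table name is illustrative; any unused name works.
CREATE TABLE sys_menu_backup_20250127 AS
SELECT * FROM sys_menu;

-- To restore, empty sys_menu and copy the rows back:
-- TRUNCATE TABLE sys_menu;
-- INSERT INTO sys_menu SELECT * FROM sys_menu_backup_20250127;
```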
## sys_menu Table Structure

- `id`: menu ID (primary key)
- `parent_id`: parent menu ID (0 means a top-level menu)
- `name`: display name of the menu
- `type`: menu type (0 = directory, 1 = menu, 2 = button)
- `sort`: sort key (smaller numbers come first)
- `icon`: menu icon
- `is_delete`: soft-delete flag (0 = not deleted, 1 = deleted)
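For reference, the column list above corresponds to a table shaped roughly like the following DDL. The types and lengths are assumptions, so check the real definition with `SHOW CREATE TABLE sys_menu;` before relying on it.

```sql
-- Reconstructed shape of sys_menu based on the column list above.
-- Column types, lengths, and update_time are assumptions, not the project's actual schema.
CREATE TABLE sys_menu (
  id          BIGINT AUTO_INCREMENT PRIMARY KEY COMMENT 'menu ID',
  parent_id   BIGINT      NOT NULL DEFAULT 0 COMMENT 'parent menu ID, 0 = top level',
  name        VARCHAR(64) NOT NULL COMMENT 'display name',
  type        TINYINT     NOT NULL COMMENT '0 = directory, 1 = menu, 2 = button',
  sort        INT         NOT NULL DEFAULT 0 COMMENT 'smaller values sort first',
  icon        VARCHAR(128) COMMENT 'menu icon',
  is_delete   TINYINT     NOT NULL DEFAULT 0 COMMENT '0 = active, 1 = deleted',
  update_time DATETIME COMMENT 'last modification time (set by the UPDATE above)'
);
```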
## Frontend Impact

After the database change, the frontend loads the menu through:
- API: `/getUserMenus`
- Frontend file: `pc-cattle-transportation/src/store/permission.js`
- Menu rendering: `pc-cattle-transportation/src/components/layout/index.vue`

Once the change is applied, users need to log in again or refresh the page before the new menu structure shows up; a sketch of that flow follows.
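This sketch is illustrative only: the real `permission.js` may be organized differently, and the response field names (`parentId`, `sort`) are assumptions about the API payload.

```js
// Illustrative sketch of the menu flow, not the project's actual permission.js.
import axios from 'axios';

// Fetch the menu rows the backend derives from sys_menu for the current user.
export async function fetchUserMenus() {
  const { data } = await axios.get('/getUserMenus');
  return data || [];
}

// Build the sidebar tree: parent_id = 0 marks top-level items, smaller sort first.
export function buildMenuTree(rows, parentId = 0) {
  return rows
    .filter((row) => row.parentId === parentId)
    .sort((a, b) => a.sort - b.sort)
    .map((row) => ({ ...row, children: buildMenuTree(rows, row.id) }));
}

// layout/index.vue would then render buildMenuTree(await fetchUserMenus()),
// so the renamed "系统管理" entry appears after the next login or refresh.
```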
@@ -1,24 +1,24 @@
 -- =============================================
 -- 数据库迁移脚本:为 delivery 表添加 landing_entruck_weight 字段
--- 用途:存储约定单价(元/公斤)
+-- 用途:存储落地过磅重量
 -- 创建时间:2025-01-27
 -- 数据库:MySQL
 -- =============================================

--- 为 delivery 表添加 landing_entruck_weight 字段
+-- 为 delivery 表添加 landingEntruck_weight 字段
 -- 字段类型:DECIMAL(10,2)(支持小数点后2位)
--- 位置:在 sale_price 字段之后
--- 注释:约定单价(元/公斤)
+-- 位置:在 entruck_weight 字段之后
+-- 注释:落地过磅重量
 ALTER TABLE delivery
-ADD COLUMN firm_price DECIMAL(10,2) COMMENT '约定单价(元/公斤)'
-AFTER sale_price;
+ADD COLUMN landingEntruck_weight DECIMAL(10,2) COMMENT '落地过磅重量'
+AFTER entruck_weight;

 -- 验证字段是否添加成功
 SELECT COLUMN_NAME, DATA_TYPE, IS_NULLABLE, COLUMN_DEFAULT, COLUMN_COMMENT
 FROM INFORMATION_SCHEMA.COLUMNS
 WHERE TABLE_SCHEMA = DATABASE()
 AND TABLE_NAME = 'delivery'
-AND COLUMN_NAME = 'firm_price';
+AND COLUMN_NAME = 'landingEntruck_weight';

 -- 显示完整的表结构
 DESCRIBE delivery;
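Note that the script's banner comment refers to `landing_entruck_weight` while the ALTER actually creates `landingEntruck_weight`. If the migration ever needs to be undone, a matching rollback is a single DROP COLUMN; the sketch below assumes the column name exactly as written in the ALTER statement.

```sql
-- Rollback sketch for the migration above (MySQL).
-- Assumes the column was created as `landingEntruck_weight`, per the ALTER statement.
ALTER TABLE delivery
DROP COLUMN landingEntruck_weight;

-- Confirm the column is gone:
SELECT COLUMN_NAME
FROM INFORMATION_SCHEMA.COLUMNS
WHERE TABLE_SCHEMA = DATABASE()
  AND TABLE_NAME = 'delivery'
  AND COLUMN_NAME = 'landingEntruck_weight';
```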
117
tradeCattle/aiotagro-cattle-trade/node_modules/.package-lock.json
generated
vendored
Normal file
891
tradeCattle/aiotagro-cattle-trade/node_modules/@xmldom/xmldom/CHANGELOG.md
generated
vendored
Normal file
@@ -0,0 +1,891 @@
|
|||||||
|
# Changelog
|
||||||
|
|
||||||
|
All notable changes to this project will be documented in this file.
|
||||||
|
|
||||||
|
This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
|
||||||
|
|
||||||
|
## [0.9.8](https://github.com/xmldom/xmldom/compare/0.9.8...0.9.7)
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- fix: replace \u2029 as part of normalizeLineEndings [`#839`](https://github.com/xmldom/xmldom/pull/839) / [`#838`](https://github.com/xmldom/xmldom/issues/838)
|
||||||
|
- perf: speed up line detection [`#847`](https://github.com/xmldom/xmldom/pull/847) / [`#838`](https://github.com/xmldom/xmldom/issues/838)
|
||||||
|
|
||||||
|
### Chore
|
||||||
|
|
||||||
|
- updated dependencies
|
||||||
|
- drop jazzer and rxjs devDependencies [`#845`](https://github.com/xmldom/xmldom/pull/845)
|
||||||
|
|
||||||
|
Thank you,
|
||||||
|
[@kboshold](https://github.com/kboshold),
|
||||||
|
[@Ponynjaa](https://github.com/Ponynjaa),
|
||||||
|
for your contributions.
|
||||||
|
|
||||||
|
|
||||||
|
## [0.9.7](https://github.com/xmldom/xmldom/compare/0.9.6...0.9.7)
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
- Implementation of `hasAttributes` [`#804`](https://github.com/xmldom/xmldom/pull/804)
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- locator is now true even when other options are being used for the DOMParser [`#802`](https://github.com/xmldom/xmldom/issues/802) / [`#803`](https://github.com/xmldom/xmldom/pull/803)
|
||||||
|
- allow case-insensitive DOCTYPE in HTML [`#817`](https://github.com/xmldom/xmldom/issues/817) / [`#819`](https://github.com/xmldom/xmldom/pull/819)
|
||||||
|
|
||||||
|
### Performance
|
||||||
|
|
||||||
|
- simplify `DOM.compareDocumentPosition` [`#805`](https://github.com/xmldom/xmldom/pull/805)
|
||||||
|
|
||||||
|
### Chore
|
||||||
|
|
||||||
|
- updated devDependencies
|
||||||
|
|
||||||
|
Thank you,
|
||||||
|
[@zorkow](https://github.com/zorkow),
|
||||||
|
[@Ponynjaa](https://github.com/Ponynjaa),
|
||||||
|
[@WesselKroos](https://github.com/WesselKroos),
|
||||||
|
for your contributions.
|
||||||
|
|
||||||
|
|
||||||
|
## [0.9.6](https://github.com/xmldom/xmldom/compare/0.9.5...0.9.6)
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- lower error level for unicode replacement character [`#790`](https://github.com/xmldom/xmldom/issues/790) / [`#794`](https://github.com/xmldom/xmldom/pull/794) / [`#797`](https://github.com/xmldom/xmldom/pull/797)
|
||||||
|
|
||||||
|
### Chore
|
||||||
|
|
||||||
|
- updated devDependencies
|
||||||
|
- migrate renovate config [`#792`](https://github.com/xmldom/xmldom/pull/792)
|
||||||
|
|
||||||
|
Thank you, [@eglitise](https://github.com/eglitise), for your contributions.
|
||||||
|
|
||||||
|
|
||||||
|
## [0.9.5](https://github.com/xmldom/xmldom/compare/0.9.4...0.9.5)
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- fix: re-index childNodes on insertBefore [`#763`](https://github.com/xmldom/xmldom/issues/763) / [`#766`](https://github.com/xmldom/xmldom/pull/766)
|
||||||
|
|
||||||
|
Thank you,
|
||||||
|
[@mureinik](https://github.com/mureinik),
|
||||||
|
for your contributions.
|
||||||
|
|
||||||
|
|
||||||
|
## [0.9.4](https://github.com/xmldom/xmldom/compare/0.9.3...0.9.4)
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- restore performance for large amount of child nodes [`#748`](https://github.com/xmldom/xmldom/issues/748) / [`#760`](https://github.com/xmldom/xmldom/pull/760)
|
||||||
|
- types: correct error handler level to `warning` (#759) [`#754`](https://github.com/xmldom/xmldom/issues/754) / [`#759`](https://github.com/xmldom/xmldom/pull/759)
|
||||||
|
|
||||||
|
### Docs
|
||||||
|
|
||||||
|
- test: verify BOM handling [`#758`](https://github.com/xmldom/xmldom/pull/758)
|
||||||
|
|
||||||
|
Thank you,
|
||||||
|
[@luffynando](https://github.com/luffynando),
|
||||||
|
[@mattiasw](https://github.com/mattiasw),
|
||||||
|
[@JoinerDev](https://github.com/JoinerDev),
|
||||||
|
for your contributions.
|
||||||
|
|
||||||
|
|
||||||
|
## [0.9.3](https://github.com/xmldom/xmldom/compare/0.9.2...0.9.3)
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- restore more `Node` and `ProcessingInstruction` types [`#725`](https://github.com/xmldom/xmldom/issues/725) / [`#726`](https://github.com/xmldom/xmldom/pull/726)
|
||||||
|
- `getElements*` methods return `LiveNodeList<Element>` [`#731`](https://github.com/xmldom/xmldom/issues/731) / [`#734`](https://github.com/xmldom/xmldom/pull/734)
|
||||||
|
- Add more missing `Node` props [`#728`](https://github.com/xmldom/xmldom/pull/728), triggered by unclosed [`#724`](https://github.com/xmldom/xmldom/pull/724)
|
||||||
|
|
||||||
|
### Docs
|
||||||
|
|
||||||
|
- Update supported runtimes in readme (NodeJS >= 14.6 and other [ES5 compatible runtimes](https://compat-table.github.io/compat-table/es5/))
|
||||||
|
|
||||||
|
### Chore
|
||||||
|
|
||||||
|
- updates devDependencies
|
||||||
|
|
||||||
|
Thank you,
|
||||||
|
[@Ponynjaa](https://github.com/Ponynjaa),
|
||||||
|
[@ayZagen](https://github.com/ayZagen),
|
||||||
|
[@sserdyuk](https://github.com/sserdyuk),
|
||||||
|
[@wydengyre](https://github.com/wydengyre),
|
||||||
|
[@mykola-mokhnach](https://github.com/mykola-mokhnach),
|
||||||
|
[@benkroeger](https://github.com/benkroeger),
|
||||||
|
for your contributions.
|
||||||
|
|
||||||
|
# Changelog
|
||||||
|
|
||||||
|
All notable changes to this project will be documented in this file.
|
||||||
|
|
||||||
|
This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
|
||||||
|
|
||||||
|
## [0.9.2](https://github.com/xmldom/xmldom/compare/0.9.1...0.9.2)
|
||||||
|
|
||||||
|
### Feature
|
||||||
|
|
||||||
|
- add `Element.getElementsByClassName` [`#722`](https://github.com/xmldom/xmldom/pull/722)
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- add missing types for `Document.documentElement` and `Element.tagName` [`#721`](https://github.com/xmldom/xmldom/pull/721) [`#720`](https://github.com/xmldom/xmldom/issues/720)
|
||||||
|
|
||||||
|
Thank you, [@censujiang](https://github.com/censujiang), [@Mathias-S](https://github.com/Mathias-S), for your contributions
|
||||||
|
|
||||||
|
|
||||||
|
## [0.9.1](https://github.com/xmldom/xmldom/compare/0.9.0...0.9.1)
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- DOMParser.parseFromString requires mimeType as second argument [`#713`](https://github.com/xmldom/xmldom/pull/713)
|
||||||
|
- correct spelling of `isHTMLMimeType` in type definition [`#715`](https://github.com/xmldom/xmldom/pull/715) / [`#712`](https://github.com/xmldom/xmldom/issues/712)
|
||||||
|
- sync types with exports [`#717`](https://github.com/xmldom/xmldom/pull/717) / [`#285`](https://github.com/xmldom/xmldom/issues/285) / [`#695`](https://github.com/xmldom/xmldom/issues/695)
|
||||||
|
|
||||||
|
### Other
|
||||||
|
|
||||||
|
- minimum tested node version is 14 [`#710`](https://github.com/xmldom/xmldom/pull/710)
|
||||||
|
|
||||||
|
Thank you, [@krystofwoldrich](https://github.com/krystofwoldrich), [@marvinruder](https://github.com/marvinruder), [@amacneil](https://github.com/amacneil), [@defunctzombie](https://github.com/defunctzombie),
|
||||||
|
[@tjhorner](https://github.com/tjhorner), [@danon](https://github.com/danon), for your contributions
|
||||||
|
|
||||||
|
|
||||||
|
## [0.9.0](https://github.com/xmldom/xmldom/compare/0.9.0-beta.11...0.9.0)
|
||||||
|
|
||||||
|
- [Discussion](https://github.com/xmldom/xmldom/discussions/435)
|
||||||
|
- [Summary on dev.to](https://dev.to/karfau/release-090-of-xmldomxmldom-4106)
|
||||||
|
|
||||||
|
### Features
|
||||||
|
|
||||||
|
- feat: expose all DOM level 2 element prototypes [`#637`](https://github.com/xmldom/xmldom/pull/637) / [`#40`](https://github.com/xmldom/xmldom/issues/40)
|
||||||
|
- feat: add iterator function to NodeList and NamedNodeMap [`#634`](https://github.com/xmldom/xmldom/pull/634) / [`#633`](https://github.com/xmldom/xmldom/issues/633)
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- parse empty/whitspace only doctype internal subset [`#692`](https://github.com/xmldom/xmldom/pull/692)
|
||||||
|
- avoid prototype clash in namespace prefix [`#554`](https://github.com/xmldom/xmldom/pull/554)
|
||||||
|
- report fatalError when doctype is inside elements [`#550`](https://github.com/xmldom/xmldom/pull/550)
|
||||||
|
|
||||||
|
### Other
|
||||||
|
|
||||||
|
- test: add fuzz target and regression tests [`#556`](https://github.com/xmldom/xmldom/pull/556)
|
||||||
|
- chore: improve .gitignore and provide .envrc.template [`#697`](https://github.com/xmldom/xmldom/pull/697)
|
||||||
|
- chore: Apply security best practices [`#546`](https://github.com/xmldom/xmldom/pull/546)
|
||||||
|
- ci: check test coverage in PRs [`#524`](https://github.com/xmldom/xmldom/pull/524)
|
||||||
|
- docs: add missing commas to readme [`#566`](https://github.com/xmldom/xmldom/pull/566)
|
||||||
|
- docs: click to copy install command in readme [`#644`](https://github.com/xmldom/xmldom/pull/644)
|
||||||
|
- docs: enhance jsdoc comments [`#511`](https://github.com/xmldom/xmldom/pull/511)
|
||||||
|
|
||||||
|
Thank you, [@kboshold](https://github.com/kboshold), [@edi9999](https://github.com/edi9999), [@apupier](https://github.com/apupier),
|
||||||
|
[@shunkica](https://github.com/shunkica), [@homer0](https://github.com/homer0), [@jhauga](https://github.com/jhauga),
|
||||||
|
[@UdayKharatmol](https://github.com/UdayKharatmol), for your contributions
|
||||||
|
|
||||||
|
|
||||||
|
## [0.9.0-beta.11](https://github.com/xmldom/xmldom/compare/0.9.0-beta.10...0.9.0-beta.11)
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- report more non well-formed cases [`#519`](https://github.com/xmldom/xmldom/pull/519) / [`#45`](https://github.com/xmldom/xmldom/issues/45) / [`#125`](https://github.com/xmldom/xmldom/issues/125) / [`#467`](https://github.com/xmldom/xmldom/issues/467)
|
||||||
|
BREAKING-CHANGE: Reports more not well-formed documents as fatalError
|
||||||
|
and drop broken support for optional and unclosed tags in HTML.
|
||||||
|
|
||||||
|
### Other
|
||||||
|
|
||||||
|
- Translate/drop non English comments [`#518`](https://github.com/xmldom/xmldom/pull/518)
|
||||||
|
- use node v16 for development [`#517`](https://github.com/xmldom/xmldom/pull/517)
|
||||||
|
|
||||||
|
Thank you, [@brodybits](https://github.com/brodybits), [@cbettinger](https://github.com/cbettinger), [@josecarlosrx](https://github.com/josecarlosrx), for your contributions
|
||||||
|
|
||||||
|
|
||||||
|
## [0.9.0-beta.10](https://github.com/xmldom/xmldom/compare/0.9.0-beta.9...0.9.0-beta.10)
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- dom: prevent iteration over deleted items [`#514`](https://github.com/xmldom/xmldom/pull/514)/ [`#499`](https://github.com/xmldom/xmldom/issues/499)
|
||||||
|
|
||||||
|
### Chore
|
||||||
|
|
||||||
|
- use prettier plugin for jsdoc [`#513`](https://github.com/xmldom/xmldom/pull/513)
|
||||||
|
|
||||||
|
Thank you, [@qtow](https://github.com/qtow), [@shunkica](https://github.com/shunkica), [@homer0](https://github.com/homer0), for your contributions
|
||||||
|
|
||||||
|
|
||||||
|
## [0.8.10](https://github.com/xmldom/xmldom/compare/0.8.9...0.8.10)
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- dom: prevent iteration over deleted items [`#514`](https://github.com/xmldom/xmldom/pull/514)/ [`#499`](https://github.com/xmldom/xmldom/issues/499)
|
||||||
|
|
||||||
|
Thank you, [@qtow](https://github.com/qtow), for your contributions
|
||||||
|
|
||||||
|
|
||||||
|
## [0.7.13](https://github.com/xmldom/xmldom/compare/0.7.12...0.7.13)
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- dom: prevent iteration over deleted items [`#514`](https://github.com/xmldom/xmldom/pull/514)/ [`#499`](https://github.com/xmldom/xmldom/issues/499)
|
||||||
|
|
||||||
|
Thank you, [@qtow](https://github.com/qtow), for your contributions
|
||||||
|
|
||||||
|
|
||||||
|
## [0.9.0-beta.9](https://github.com/xmldom/xmldom/compare/0.9.0-beta.8...0.9.0-beta.9)
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- Set nodeName property in ProcessingInstruction [`#509`](https://github.com/xmldom/xmldom/pull/509) / [`#505`](https://github.com/xmldom/xmldom/issues/505)
|
||||||
|
- preserve DOCTYPE internal subset [`#498`](https://github.com/xmldom/xmldom/pull/498) / [`#497`](https://github.com/xmldom/xmldom/pull/497) / [`#117`](https://github.com/xmldom/xmldom/issues/117)\
|
||||||
|
BREAKING CHANGES: Many documents that were previously accepted by xmldom, esecially non well-formed ones are no longer accepted. Some issues that were formerly reported as errors are now a fatalError.
|
||||||
|
- DOMParser: Align parseFromString errors with specs [`#454`](https://github.com/xmldom/xmldom/pull/454)
|
||||||
|
|
||||||
|
### Chore
|
||||||
|
|
||||||
|
- stop running mutation tests using stryker [`#496`](https://github.com/xmldom/xmldom/pull/496)
|
||||||
|
- make `toErrorSnapshot` windows compatible [`#503`](https://github.com/xmldom/xmldom/pull/503)
|
||||||
|
|
||||||
|
Thank you, [@cjbarth](https://github.com/cjbarth), [@shunkica](https://github.com/shunkica), [@pmahend1](https://github.com/pmahend1), [@niklasl](https://github.com/niklasl), for your contributions
|
||||||
|
|
||||||
|
|
||||||
|
## [0.8.9](https://github.com/xmldom/xmldom/compare/0.8.8...0.8.9)
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- Set nodeName property in ProcessingInstruction [`#509`](https://github.com/xmldom/xmldom/pull/509) / [`#505`](https://github.com/xmldom/xmldom/issues/505)
|
||||||
|
|
||||||
|
Thank you, [@cjbarth](https://github.com/cjbarth), for your contributions
|
||||||
|
|
||||||
|
|
||||||
|
## [0.7.12](https://github.com/xmldom/xmldom/compare/0.7.11...0.7.12)
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- Set nodeName property in ProcessingInstruction [`#509`](https://github.com/xmldom/xmldom/pull/509) / [`#505`](https://github.com/xmldom/xmldom/issues/505)
|
||||||
|
|
||||||
|
Thank you, [@cjbarth](https://github.com/cjbarth), for your contributions
|
||||||
|
|
||||||
|
|
||||||
|
## [0.9.0-beta.8](https://github.com/xmldom/xmldom/compare/0.9.0-beta.7...0.9.0-beta.8)
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- Throw DOMException when calling removeChild with invalid parameter [`#494`](https://github.com/xmldom/xmldom/pull/494) / [`#135`](https://github.com/xmldom/xmldom/issues/135)
|
||||||
|
|
||||||
|
BREAKING CHANGE: Previously it was possible (but not documented) to call `Node.removeChild` with any node in the tree,
|
||||||
|
and with certain exceptions, it would work. This is no longer the case: calling `Node.removeChild` with an argument that is not a direct child of the node that it is called from, will throw a NotFoundError DOMException, as it is described by the specs.
|
||||||
|
|
||||||
|
Thank you, [@noseworthy](https://github.com/noseworthy), [@davidmc24](https://github.com/davidmc24), for your contributions
|
||||||
|
|
||||||
|
|
||||||
|
## [0.9.0-beta.7](https://github.com/xmldom/xmldom/compare/0.9.0-beta.6...0.9.0-beta.7)
|
||||||
|
|
||||||
|
### Feature
|
||||||
|
|
||||||
|
- Add `compareDocumentPosition` method from level 3 spec. [`#488`](https://github.com/xmldom/xmldom/pull/488)
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- `getAttribute` and `getAttributeNS` should return `null` (#477) [`#46`](https://github.com/xmldom/xmldom/issues/46)
|
||||||
|
- several issues in NamedNodeMap and Element (#482) [`#46`](https://github.com/xmldom/xmldom/issues/46)
|
||||||
|
- properly parse closing where the last attribute has no value [`#485`](https://github.com/xmldom/xmldom/pull/485) / [`#486`](https://github.com/xmldom/xmldom/issues/486)
|
||||||
|
- extend list of HTML entities [`#489`](https://github.com/xmldom/xmldom/pull/489)
|
||||||
|
|
||||||
|
BREAKING CHANGE: Iteration over attributes now happens in the right order and non-existing attributes now return `null` instead of undefined. THe same is true for the `namepsaceURI` and `prefix` of Attr nodes.
|
||||||
|
All of the changes are fixing misalignment with the DOM specs, so if you expected it to work as specified,
|
||||||
|
nothing should break for you.
|
||||||
|
|
||||||
|
### Chore
|
||||||
|
|
||||||
|
- update multiple devDependencies
|
||||||
|
- Configure jest (correctly) and wallaby [`#481`](https://github.com/xmldom/xmldom/pull/481) / [`#483`](https://github.com/xmldom/xmldom/pull/483)
|
||||||
|
|
||||||
|
Thank you, [@bulandent](https://github.com/bulandent), [@zorkow](https://github.com/zorkow), for your contributions
|
||||||
|
|
||||||
|
|
||||||
|
## [0.8.8](https://github.com/xmldom/xmldom/compare/0.8.7...0.8.8)
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- extend list of HTML entities [`#489`](https://github.com/xmldom/xmldom/pull/489)
|
||||||
|
|
||||||
|
Thank you, [@zorkow](https://github.com/zorkow), for your contributions
|
||||||
|
|
||||||
|
## [0.7.11](https://github.com/xmldom/xmldom/compare/0.7.10...0.7.11)
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- extend list of HTML entities [`#489`](https://github.com/xmldom/xmldom/pull/489)
|
||||||
|
|
||||||
|
Thank you, [@zorkow](https://github.com/zorkow), for your contributions
|
||||||
|
|
||||||
|
|
||||||
|
## [0.8.7](https://github.com/xmldom/xmldom/compare/0.8.6...0.8.7)
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- properly parse closing where the last attribute has no value [`#485`](https://github.com/xmldom/xmldom/pull/485) / [`#486`](https://github.com/xmldom/xmldom/issues/486)
|
||||||
|
|
||||||
|
Thank you, [@bulandent](https://github.com/bulandent), for your contributions
|
||||||
|
|
||||||
|
|
||||||
|
## [0.7.10](https://github.com/xmldom/xmldom/compare/0.7.9...0.7.10)
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- properly parse closing where the last attribute has no value [`#485`](https://github.com/xmldom/xmldom/pull/485) / [`#486`](https://github.com/xmldom/xmldom/issues/486)
|
||||||
|
|
||||||
|
Thank you, [@bulandent](https://github.com/bulandent), for your contributions
|
||||||
|
|
||||||
|
|
||||||
|
## [0.8.6](https://github.com/xmldom/xmldom/compare/0.8.5...0.8.6)
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- Properly check nodes before replacement [`#457`](https://github.com/xmldom/xmldom/pull/457) / [`#455`](https://github.com/xmldom/xmldom/issues/455) / [`#456`](https://github.com/xmldom/xmldom/issues/456)
|
||||||
|
|
||||||
|
Thank you, [@edemaine](https://github.com/edemaine), [@pedro-l9](https://github.com/pedro-l9), for your contributions
|
||||||
|
|
||||||
|
|
||||||
|
## [0.7.9](https://github.com/xmldom/xmldom/compare/0.7.8...0.7.9)
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- Properly check nodes before replacement [`#457`](https://github.com/xmldom/xmldom/pull/457) / [`#455`](https://github.com/xmldom/xmldom/issues/455) / [`#456`](https://github.com/xmldom/xmldom/issues/456)
|
||||||
|
|
||||||
|
Thank you, [@edemaine](https://github.com/edemaine), [@pedro-l9](https://github.com/pedro-l9), for your contributions
|
||||||
|
|
||||||
|
|
||||||
|
## [0.9.0-beta.6](https://github.com/xmldom/xmldom/compare/0.9.0-beta.5...0.9.0-beta.6)
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- Properly check nodes before replacement [`#457`](https://github.com/xmldom/xmldom/pull/457) / [`#455`](https://github.com/xmldom/xmldom/issues/455) / [`#456`](https://github.com/xmldom/xmldom/issues/456)
|
||||||
|
|
||||||
|
Thank you, [@edemaine](https://github.com/edemaine), [@pedro-l9](https://github.com/pedro-l9), for your contributions
|
||||||
|
|
||||||
|
|
||||||
|
## [0.9.0-beta.5](https://github.com/xmldom/xmldom/compare/0.9.0-beta.4...0.9.0-beta.5)
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- fix: Restore ES5 compatibility [`#452`](https://github.com/xmldom/xmldom/pull/452) / [`#453`](https://github.com/xmldom/xmldom/issues/453)
|
||||||
|
|
||||||
|
Thank you, [@fengxinming](https://github.com/fengxinming), for your contributions
|
||||||
|
|
||||||
|
|
||||||
|
## [0.8.5](https://github.com/xmldom/xmldom/compare/0.8.4...0.8.5)
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- fix: Restore ES5 compatibility [`#452`](https://github.com/xmldom/xmldom/pull/452) / [`#453`](https://github.com/xmldom/xmldom/issues/453)
|
||||||
|
|
||||||
|
Thank you, [@fengxinming](https://github.com/fengxinming), for your contributions
|
||||||
|
|
||||||
|
|
||||||
|
## [0.7.8](https://github.com/xmldom/xmldom/compare/0.7.7...0.7.8)
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- fix: Restore ES5 compatibility [`#452`](https://github.com/xmldom/xmldom/pull/452) / [`#453`](https://github.com/xmldom/xmldom/issues/453)
|
||||||
|
|
||||||
|
Thank you, [@fengxinming](https://github.com/fengxinming), for your contributions
|
||||||
|
|
||||||
|
|
||||||
|
## [0.9.0-beta.4](https://github.com/xmldom/xmldom/compare/0.9.0-beta.3...0.9.0-beta.4)
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- Security: Prevent inserting DOM nodes when they are not well-formed [`CVE-2022-39353`](https://github.com/xmldom/xmldom/security/advisories/GHSA-crh6-fp67-6883)
|
||||||
|
In case such a DOM would be created, the part that is not well-formed will be transformed into text nodes, in which xml specific characters like `<` and `>` are encoded accordingly.
|
||||||
|
In the upcoming version 0.9.0 those text nodes will no longer be added and an error will be thrown instead.
|
||||||
|
This change can break your code, if you relied on this behavior, e.g. multiple root elements in the past. We consider it more important to align with the specs that we want to be aligned with, considering the potential security issues that might derive from people not being aware of the difference in behavior.
|
||||||
|
Related Spec: <https://dom.spec.whatwg.org/#concept-node-ensure-pre-insertion-validity>
|
||||||
|
|
||||||
|
### Chore
|
||||||
|
|
||||||
|
- update multiple devDependencies
|
||||||
|
- Add eslint-plugin-node for `lib` [`#448`](https://github.com/xmldom/xmldom/pull/448) / [`#190`](https://github.com/xmldom/xmldom/issues/190)
|
||||||
|
- style: Apply prettier to all code [`#447`](https://github.com/xmldom/xmldom/pull/447) / [`#29`](https://github.com/xmldom/xmldom/issues/29) / [`#130`](https://github.com/xmldom/xmldom/issues/130)
|
||||||
|
|
||||||
|
Thank you, [@XhmikosR](https://github.com/XhmikosR), [@awwright](https://github.com/awwright), [@frumioj](https://github.com/frumioj), [@cjbarth](https://github.com/cjbarth), [@markgollnick](https://github.com/markgollnick) for your contributions
|
||||||
|
|
||||||
|
|
||||||
|
## [0.8.4](https://github.com/xmldom/xmldom/compare/0.8.3...0.8.4)
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- Security: Prevent inserting DOM nodes when they are not well-formed [`CVE-2022-39353`](https://github.com/xmldom/xmldom/security/advisories/GHSA-crh6-fp67-6883)
|
||||||
|
In case such a DOM would be created, the part that is not well-formed will be transformed into text nodes, in which xml specific characters like `<` and `>` are encoded accordingly.
|
||||||
|
In the upcoming version 0.9.0 those text nodes will no longer be added and an error will be thrown instead.
|
||||||
|
This change can break your code, if you relied on this behavior, e.g. multiple root elements in the past. We consider it more important to align with the specs that we want to be aligned with, considering the potential security issues that might derive from people not being aware of the difference in behavior.
|
||||||
|
Related Spec: <https://dom.spec.whatwg.org/#concept-node-ensure-pre-insertion-validity>
|
||||||
|
|
||||||
|
Thank you, [@frumioj](https://github.com/frumioj), [@cjbarth](https://github.com/cjbarth), [@markgollnick](https://github.com/markgollnick) for your contributions
|
||||||
|
|
||||||
|
|
||||||
|
## [0.7.7](https://github.com/xmldom/xmldom/compare/0.7.6...0.7.7)
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- Security: Prevent inserting DOM nodes when they are not well-formed [`CVE-2022-39353`](https://github.com/xmldom/xmldom/security/advisories/GHSA-crh6-fp67-6883)
|
||||||
|
In case such a DOM would be created, the part that is not well-formed will be transformed into text nodes, in which xml specific characters like `<` and `>` are encoded accordingly.
|
||||||
|
In the upcoming version 0.9.0 those text nodes will no longer be added and an error will be thrown instead.
|
||||||
|
This change can break your code, if you relied on this behavior, e.g. multiple root elements in the past. We consider it more important to align with the specs that we want to be aligned with, considering the potential security issues that might derive from people not being aware of the difference in behavior.
|
||||||
|
Related Spec: <https://dom.spec.whatwg.org/#concept-node-ensure-pre-insertion-validity>
|
||||||
|
|
||||||
|
Thank you, [@frumioj](https://github.com/frumioj), [@cjbarth](https://github.com/cjbarth), [@markgollnick](https://github.com/markgollnick) for your contributions
|
||||||
|
|
||||||
|
|
||||||
|
## [0.9.0-beta.3](https://github.com/xmldom/xmldom/compare/0.9.0-beta.2...0.9.0-beta.3)
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- fix: Stop adding tags after incomplete closing tag [`#445`](https://github.com/xmldom/xmldom/pull/445) / [`#416`](https://github.com/xmldom/xmldom/pull/416)
|
||||||
|
BREAKING CHANGE: It no longer reports an error when parsing HTML containing incomplete closing tags, to align the behavior with the one in the browser.
|
||||||
|
BREAKING CHANGE: If your code relied on not well-formed XML to be parsed and include subsequent tags, this will no longer work.
|
||||||
|
- fix: Avoid bidirectional characters in source code [`#440`](https://github.com/xmldom/xmldom/pull/440)
|
||||||
|
|
||||||
|
### Other
|
||||||
|
|
||||||
|
- ci: Add CodeQL scan [`#444`](https://github.com/xmldom/xmldom/pull/444)
|
||||||
|
|
||||||
|
Thank you, [@ACN-kck](https://github.com/ACN-kck), [@mgerlach](https://github.com/mgerlach) for your contributions
|
||||||
|
|
||||||
|
|
||||||
|
## [0.7.6](https://github.com/xmldom/xmldom/compare/0.7.5...0.7.6)
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
- Avoid iterating over prototype properties [`#441`](https://github.com/xmldom/xmldom/pull/441) / [`#437`](https://github.com/xmldom/xmldom/pull/437) / [`#436`](https://github.com/xmldom/xmldom/issues/436)
|
||||||
|
|
||||||
|
Thank you, [@jftanner](https://github.com/jftanner), [@Supraja9726](https://github.com/Supraja9726) for your contributions
|
||||||
|
|
||||||
|
|
||||||
|
## [0.8.3](https://github.com/xmldom/xmldom/compare/0.8.3...0.8.2)
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
- Avoid iterating over prototype properties [`#437`](https://github.com/xmldom/xmldom/pull/437) / [`#436`](https://github.com/xmldom/xmldom/issues/436)
|
||||||
|
|
||||||
|
Thank you, [@Supraja9726](https://github.com/Supraja9726) for your contributions
|
||||||
|
|
||||||
|
|
||||||
|
## [0.9.0-beta.2](https://github.com/xmldom/xmldom/compare/0.9.0-beta.1...0.9.0-beta.2)
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
- Avoid iterating over prototype properties [`#437`](https://github.com/xmldom/xmldom/pull/437) / [`#436`](https://github.com/xmldom/xmldom/issues/436)
|
||||||
|
|
||||||
|
Thank you, [@Supraja9726](https://github.com/Supraja9726) for your contributions
|
||||||
|
|
||||||
|
|
||||||
|
## [0.9.0-beta.1](https://github.com/xmldom/xmldom/compare/0.8.2...0.9.0-beta.1)
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
**Only use HTML rules if mimeType matches** [`#338`](https://github.com/xmldom/xmldom/pull/338), fixes [`#203`](https://github.com/xmldom/xmldom/issues/203)
|
||||||
|
|
||||||
|
In the living specs for parsing XML and HTML, that this library is trying to implement,
|
||||||
|
there is a distinction between the different types of documents being parsed:
|
||||||
|
There are quite some rules that are different for parsing, constructing and serializing XML vs HTML documents.
|
||||||
|
|
||||||
|
So far xmldom was always "detecting" whether "the HTML rules should be applied" by looking at the current namespace. So from the first time an the HTML default namespace (`http://www.w3.org/1999/xhtml`) was found, every node was treated as being part of an HTML document. This misconception is the root cause for quite some reported bugs.
|
||||||
|
|
||||||
|
BREAKING CHANGE: HTML rules are no longer applied just because of the namespace, but require the `mimeType` argument passed to `DOMParser.parseFromString(source, mimeType)` to match `'text/html'`. Doing so implies all rules for handling casing for tag and attribute names when parsing, creation of nodes and searching nodes.
|
||||||
|
|
||||||
|
BREAKING CHANGE: Correct the return type of `DOMParser.parseFromString` to `Document | undefined`. In case of parsing errors it was always possible that "the returned `Document`" has not been created. In case you are using Typescript you now need to handle those cases.
|
||||||
|
|
||||||
|
BREAKING CHANGE: The instance property `DOMParser.options` is no longer available, instead use the individual `readonly` property per option (`assign`, `domHandler`, `errorHandler`, `normalizeLineEndings`, `locator`, `xmlns`). Those also provides the default value if the option was not passed. The 'locator' option is now just a boolean (default remains `true`).
|
||||||
|
|
||||||
|
BREAKING CHANGE: The following methods no longer allow a (non spec compliant) boolean argument to toggle "HTML rules":
|
||||||
|
- `XMLSerializer.serializeToString`
|
||||||
|
- `Node.toString`
|
||||||
|
- `Document.toString`
|
||||||
|
|
||||||
|
The following interfaces have been implemented:
|
||||||
|
`DOMImplementation` now implements all methods defined in the DOM spec, but not all of the behavior is implemented (see docstring):
|
||||||
|
- `createDocument` creates an "XML Document" (prototype: `Document`, property `type` is `'xml'`)
|
||||||
|
- `createHTMLDocument` creates an "HTML Document" (type/prototype: `Document`, property `type` is `'html'`).
|
||||||
|
- when no argument is passed or the first argument is a string, the basic nodes for an HTML structure are created, as specified
|
||||||
|
- when the first argument is `false` no child nodes are created
|
||||||
|
|
||||||
|
`Document` now has two new readonly properties as specified in the DOM spec:
|
||||||
|
- `contentType` which is the mime-type that was used to create the document
|
||||||
|
- `type` which is either the string literal `'xml'` or `'html'`
|
||||||
|
|
||||||
|
`MIME_TYPE` (`/lib/conventions.js`):
|
||||||
|
- `hasDefaultHTMLNamespace` test if the provided string is one of the miem types that implies the default HTML namespace: `text/html` or `application/xhtml+xml`
|
||||||
|
|
||||||
|
Thank you [@weiwu-zhang](https://github.com/weiwu-zhang) for your contributions
|
||||||
|
|
||||||
|
### Chore
|
||||||
|
|
||||||
|
- update multiple devDependencies
|
||||||
|
|
||||||
|
|
||||||
|
## [0.8.2](https://github.com/xmldom/xmldom/compare/0.8.1...0.8.2)
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
- fix(dom): Serialize `>` as specified (#395) [`#58`](https://github.com/xmldom/xmldom/issues/58)
|
||||||
|
|
||||||
|
### Other
|
||||||
|
- docs: Add `nodeType` values to public interface description [`#396`](https://github.com/xmldom/xmldom/pull/396)
|
||||||
|
- test: Add executable examples for node and typescript [`#317`](https://github.com/xmldom/xmldom/pull/317)
|
||||||
|
- fix(dom): Serialize `>` as specified [`#395`](https://github.com/xmldom/xmldom/pull/395)
|
||||||
|
- chore: Add minimal `Object.assign` ponyfill [`#379`](https://github.com/xmldom/xmldom/pull/379)
|
||||||
|
- docs: Refine release documentation [`#378`](https://github.com/xmldom/xmldom/pull/378)
|
||||||
|
- chore: update various dev dependencies
|
||||||
|
|
||||||
|
Thank you [@niklasl](https://github.com/niklasl), [@cburatto](https://github.com/cburatto), [@SheetJSDev](https://github.com/SheetJSDev), [@pyrsmk](https://github.com/pyrsmk) for your contributions
|
||||||
|
|
||||||
|
## [0.8.1](https://github.com/xmldom/xmldom/compare/0.8.0...0.8.1)
|
||||||
|
|
||||||
|
### Fixes
|
||||||
|
- Only use own properties in entityMap [`#374`](https://github.com/xmldom/xmldom/pull/374)
|
||||||
|
|
||||||
|
### Docs
|
||||||
|
- Add security policy [`#365`](https://github.com/xmldom/xmldom/pull/365)
|
||||||
|
- changelog: Correct contributor name and link [`#366`](https://github.com/xmldom/xmldom/pull/366)
|
||||||
|
- Describe release/publish steps [`#358`](https://github.com/xmldom/xmldom/pull/358), [`#376`](https://github.com/xmldom/xmldom/pull/376)
|
||||||
|
- Add snyk package health badge [`#360`](https://github.com/xmldom/xmldom/pull/360)
|
||||||
|
|
||||||
|
|
||||||
|
## [0.8.0](https://github.com/xmldom/xmldom/compare/0.7.5...0.8.0)
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
- Normalize all line endings according to XML specs [1.0](https://w3.org/TR/xml/#sec-line-ends) and [1.1](https://www.w3.org/TR/xml11/#sec-line-ends) \
|
||||||
|
BREAKING CHANGE: Certain combination of line break characters are normalized to a single `\n` before parsing takes place and will no longer be preserved.
|
||||||
|
- [`#303`](https://github.com/xmldom/xmldom/issues/303) / [`#307`](https://github.com/xmldom/xmldom/pull/307)
|
||||||
|
- [`#49`](https://github.com/xmldom/xmldom/issues/49), [`#97`](https://github.com/xmldom/xmldom/issues/97), [`#324`](https://github.com/xmldom/xmldom/issues/324) / [`#314`](https://github.com/xmldom/xmldom/pull/314)
|
||||||
|
- XMLSerializer: Preserve whitespace character references [`#284`](https://github.com/xmldom/xmldom/issues/284) / [`#310`](https://github.com/xmldom/xmldom/pull/310) \
|
||||||
|
BREAKING CHANGE: If you relied on the not spec compliant preservation of literal `\t`, `\n` or `\r` in **attribute values**.
|
||||||
|
To preserve those you will have to create XML that instead contains the correct numerical (or hexadecimal) equivalent (e.g. `	`, `
`, `
`).
|
||||||
|
- Drop deprecated exports `DOMImplementation` and `XMLSerializer` from `lib/dom-parser.js` [#53](https://github.com/xmldom/xmldom/issues/53) / [`#309`](https://github.com/xmldom/xmldom/pull/309)
|
||||||
|
BREAKING CHANGE: Use the one provided by the main package export.
|
||||||
|
- dom: Remove all links as part of `removeChild` [`#343`](https://github.com/xmldom/xmldom/issues/343) / [`#355`](https://github.com/xmldom/xmldom/pull/355)
|
||||||
|
|
||||||
|
### Chore
|
||||||
|
- ci: Restore latest tested node version to 16.x [`#325`](https://github.com/xmldom/xmldom/pull/325)
|
||||||
|
- ci: Split test and lint steps into jobs [`#111`](https://github.com/xmldom/xmldom/issues/111) / [`#304`](https://github.com/xmldom/xmldom/pull/304)
|
||||||
|
- Pinned and updated devDependencies
|
||||||
|
|
||||||
|
Thank you [@marrus-sh](https://github.com/marrus-sh), [@victorandree](https://github.com/victorandree), [@mdierolf](https://github.com/mdierolf), [@tsabbay](https://github.com/tsabbay), [@fatihpense](https://github.com/fatihpense) for your contributions
|
||||||
|
|
||||||
|
## 0.7.5
|
||||||
|
|
||||||
|
[Commits](https://github.com/xmldom/xmldom/compare/0.7.4...0.7.5)
|
||||||
|
|
||||||
|
### Fixes:
|
||||||
|
|
||||||
|
- Preserve default namespace when serializing [`#319`](https://github.com/xmldom/xmldom/issues/319) / [`#321`](https://github.com/xmldom/xmldom/pull/321)
|
||||||
|
Thank you, [@lupestro](https://github.com/lupestro)
|
||||||
|
|
||||||
|
## 0.7.4
|
||||||
|
|
||||||
|
[Commits](https://github.com/xmldom/xmldom/compare/0.7.3...0.7.4)
|
||||||
|
|
||||||
|
### Fixes:
|
||||||
|
|
||||||
|
- Restore ability to parse `__prototype__` attributes [`#315`](https://github.com/xmldom/xmldom/pull/315)
|
||||||
|
Thank you, [@dsimpsonOMF](https://github.com/dsimpsonOMF)
|
||||||
|
|
||||||
|
## 0.7.3
|
||||||
|
|
||||||
|
[Commits](https://github.com/xmldom/xmldom/compare/0.7.2...0.7.3)
|
||||||
|
|
||||||
|
### Fixes:
|
||||||
|
|
||||||
|
- Add doctype when parsing from string [`#277`](https://github.com/xmldom/xmldom/issues/277) / [`#301`](https://github.com/xmldom/xmldom/pull/301)
|
||||||
|
- Correct typo in error message [`#294`](https://github.com/xmldom/xmldom/pull/294)
|
||||||
|
Thank you, [@rrthomas](https://github.com/rrthomas)
|
||||||
|
|
||||||
|
### Refactor:
|
||||||
|
|
||||||
|
- Improve exports & require statements, new main package entry [`#233`](https://github.com/xmldom/xmldom/pull/233)
|
||||||
|
|
||||||
|
### Docs:
|
||||||
|
|
||||||
|
- Fix Stryker badge [`#298`](https://github.com/xmldom/xmldom/pull/298)
|
||||||
|
- Fix link to help-wanted issues [`#299`](https://github.com/xmldom/xmldom/pull/299)
|
||||||
|
|
||||||
|
### Chore:
|
||||||
|
|
||||||
|
- Execute stryker:dry-run on branches [`#302`](https://github.com/xmldom/xmldom/pull/302)
|
||||||
|
- Fix stryker config [`#300`](https://github.com/xmldom/xmldom/pull/300)
|
||||||
|
- Split test and lint scripts [`#297`](https://github.com/xmldom/xmldom/pull/297)
|
||||||
|
- Switch to stryker dashboard owned by org [`#292`](https://github.com/xmldom/xmldom/pull/292)
|
||||||
|
|
||||||
|
## 0.7.2
|
||||||
|
|
||||||
|
[Commits](https://github.com/xmldom/xmldom/compare/0.7.1...0.7.2)
|
||||||
|
|
||||||
|
### Fixes:
|
||||||
|
|
||||||
|
- Types: Add index.d.ts to packaged files [`#288`](https://github.com/xmldom/xmldom/pull/288)
|
||||||
|
Thank you, [@forty](https://github.com/forty)
|
||||||
|
|
||||||
|
## 0.7.1
|
||||||
|
|
||||||
|
[Commits](https://github.com/xmldom/xmldom/compare/0.7.0...0.7.1)
|
||||||
|
|
||||||
|
### Fixes:
|
||||||
|
|
||||||
|
- Types: Copy types from DefinitelyTyped [`#283`](https://github.com/xmldom/xmldom/pull/283)
|
||||||
|
Thank you, [@kachkaev](https://github.com/kachkaev)
|
||||||
|
|
||||||
|
### Chore:
|
||||||
|
- package.json: remove author, maintainers, etc. [`#279`](https://github.com/xmldom/xmldom/pull/279)
|
||||||
|
|
||||||
|
## 0.7.0
|
||||||
|
|
||||||
|
[Commits](https://github.com/xmldom/xmldom/compare/0.6.0...0.7.0)
|
||||||
|
|
||||||
|
Due to [`#271`](https://github.com/xmldom/xmldom/issue/271) this version was published as
|
||||||
|
- unscoped `xmldom` package to github (git tags [`0.7.0`](https://github.com/xmldom/xmldom/tree/0.7.0) and [`0.7.0+unscoped`](https://github.com/xmldom/xmldom/tree/0.7.0%2Bunscoped))
|
||||||
|
- scoped `@xmldom/xmldom` package to npm (git tag `0.7.0+scoped`)
|
||||||
|
For more details look at [`#278`](https://github.com/xmldom/xmldom/pull/278#issuecomment-902172483)
|
||||||
|
|
||||||
|
### Fixes:
|
||||||
|
|
||||||
|
- Security: Misinterpretation of malicious XML input [`CVE-2021-32796`](https://github.com/xmldom/xmldom/security/advisories/GHSA-5fg8-2547-mr8q)
|
||||||
|
- Implement `Document.getElementsByClassName` as specified [`#213`](https://github.com/xmldom/xmldom/pull/213), thank you, [@ChALkeR](https://github.com/ChALkeR)
|
||||||
|
- Inherit namespace prefix from parent when required [`#268`](https://github.com/xmldom/xmldom/pull/268)
|
||||||
|
- Handle whitespace in closing tags [`#267`](https://github.com/xmldom/xmldom/pull/267)
|
||||||
|
- Update `DOMImplementation` according to recent specs [`#210`](https://github.com/xmldom/xmldom/pull/210)
|
||||||
|
BREAKING CHANGE: Only if you "passed features to be marked as available as a constructor arguments" and expected it to "magically work".
|
||||||
|
- No longer serializes any namespaces with an empty URI [`#244`](https://github.com/xmldom/xmldom/pull/244)
|
||||||
|
(related to [`#168`](https://github.com/xmldom/xmldom/pull/168) released in 0.6.0)
|
||||||
|
BREAKING CHANGE: Only if you rely on ["unsetting" a namespace prefix](https://github.com/xmldom/xmldom/pull/168#issuecomment-886984994) by setting it to an empty string
|
||||||
|
- Set `localName` as part of `Document.createElement` [`#229`](https://github.com/xmldom/xmldom/pull/229), thank you, [@rrthomas](https://github.com/rrthomas)
|
||||||
|
|
||||||
|
### CI
|
||||||
|
|
||||||
|
- We are now additionally running tests against node v16
|
||||||
|
- Stryker tests on the master branch now run against node v14
|
||||||
|
|
||||||
|
### Docs
|
||||||
|
|
||||||
|
- Describe relations with and between specs: [`#211`](https://github.com/xmldom/xmldom/pull/211), [`#247`](https://github.com/xmldom/xmldom/pull/247)
|
||||||
|
|
||||||
|
## 0.6.0
|
||||||
|
|
||||||
|
[Commits](https://github.com/xmldom/xmldom/compare/0.5.0...0.6.0)
|
||||||
|
|
||||||
|
### Fixes
|
||||||
|
|
||||||
|
- Stop serializing empty namespace values like `xmlns:ds=""` [`#168`](https://github.com/xmldom/xmldom/pull/168)
|
||||||
|
BREAKING CHANGE: If your code expected empty namespaces attributes to be serialized.
|
||||||
|
Thank you, [@pdecat](https://github.com/pdecat) and [@FranckDepoortere](https://github.com/FranckDepoortere)
|
||||||
|
- Escape `<` to `<` when serializing attribute values [`#198`](https://github.com/xmldom/xmldom/issues/198) / [`#199`](https://github.com/xmldom/xmldom/pull/199)
|
||||||
|
|
||||||
|
## 0.5.0
|
||||||
|
|
||||||
|
[Commits](https://github.com/xmldom/xmldom/compare/0.4.0...0.5.0)
|
||||||
|
|
||||||
|
### Fixes
|
||||||
|
- Avoid misinterpretation of malicious XML input - [`GHSA-h6q6-9hqw-rwfv`](https://github.com/xmldom/xmldom/security/advisories/GHSA-h6q6-9hqw-rwfv) (CVE-2021-21366)
|
||||||
|
- Improve error reporting; throw on duplicate attribute\
|
||||||
|
BREAKING CHANGE: It is currently not clear how to consistently deal with duplicate attributes, so it's also safer for our users to fail when detecting them.
|
||||||
|
It's possible to configure the `DOMParser.errorHandler` before parsing, to handle those errors differently.
|
||||||
|

To accomplish this and also be able to verify it in tests I needed to
- create a new `Error` type `ParseError` and export it
- Throw `ParseError` from `errorHandler.fatalError` and prevent those from being caught in `XMLReader`.
- export `DOMHandler` constructor as `__DOMHandler`
- Preserve quotes in DOCTYPE declaration
Since the only purpose of parsing the DOCTYPE is to be able to restore it when serializing, we decided that it would be best to leave the parsed `publicId` and `systemId` as is, including any quotes.
BREAKING CHANGE: If somebody relies on the actual unquoted values of those ids, they will need to take care of either single or double quotes and the right escaping.
(Without this change this would not have been possible because the SAX parser already dropped the information about the quotes that have been used in the source.)

https://www.w3.org/TR/2006/REC-xml11-20060816/#dtd
https://www.w3.org/TR/2006/REC-xml11-20060816/#IDAX1KS (External Entity Declaration)

- Fix breaking preprocessors' directives when parsing attributes [`#171`](https://github.com/xmldom/xmldom/pull/171)
- fix(dom): Escape `]]>` when serializing CharData [`#181`](https://github.com/xmldom/xmldom/pull/181)
- Switch to (only) MIT license (drop problematic LGPL license option) [`#178`](https://github.com/xmldom/xmldom/pull/178)
- Export DOMException; remove custom assertions; etc. [`#174`](https://github.com/xmldom/xmldom/pull/174)

### Docs
- Update MDN links in `readme.md` [`#188`](https://github.com/xmldom/xmldom/pull/188)

## 0.4.0

[Commits](https://github.com/xmldom/xmldom/compare/0.3.0...0.4.0)

### Fixes
- **BREAKING** Restore `&nbsp;` behavior from v0.1.27 [`#67`](https://github.com/xmldom/xmldom/pull/67)
- **BREAKING** Typecheck source param before parsing [`#113`](https://github.com/xmldom/xmldom/pull/113)
- Include documents in package files list [`#156`](https://github.com/xmldom/xmldom/pull/156)
- Preserve doctype with sysid [`#144`](https://github.com/xmldom/xmldom/pull/144)
- Remove ES6 syntax from getElementsByClassName [`#91`](https://github.com/xmldom/xmldom/pull/91)
- Revert "Add lowercase of åäö in entityMap" due to duplicate entries [`#84`](https://github.com/xmldom/xmldom/pull/84)
- fix: Convert all line separators to LF [`#66`](https://github.com/xmldom/xmldom/pull/66)

### Docs
- Update CHANGELOG.md through version 0.3.0 [`#63`](https://github.com/xmldom/xmldom/pull/63)
- Update badges [`#78`](https://github.com/xmldom/xmldom/pull/78)
- Add .editorconfig file [`#104`](https://github.com/xmldom/xmldom/pull/104)
- Add note about import [`#79`](https://github.com/xmldom/xmldom/pull/79)
- Modernize & improve the example in readme.md [`#81`](https://github.com/xmldom/xmldom/pull/81)

### CI
- Add Stryker Mutator [`#70`](https://github.com/xmldom/xmldom/pull/70)
- Add Stryker action to update dashboard [`#77`](https://github.com/xmldom/xmldom/pull/77)
- Add Node GitHub action workflow [`#64`](https://github.com/xmldom/xmldom/pull/64)
- add & enable eslint [`#106`](https://github.com/xmldom/xmldom/pull/106)
- Use eslint-plugin-es5 to enforce ES5 syntax [`#107`](https://github.com/xmldom/xmldom/pull/107)
- Recover `vows` tests, drop `proof` tests [`#59`](https://github.com/xmldom/xmldom/pull/59)
- Add jest testsuite and first tests [`#114`](https://github.com/xmldom/xmldom/pull/114)
- Add jest testsuite with `xmltest` cases [`#112`](https://github.com/xmldom/xmldom/pull/112)
- Configure Renovate [`#108`](https://github.com/xmldom/xmldom/pull/108)
- Test European HTML entities [`#86`](https://github.com/xmldom/xmldom/pull/86)
- Updated devDependencies

### Other
- Remove files that are not of any use [`#131`](https://github.com/xmldom/xmldom/pull/131), [`#65`](https://github.com/xmldom/xmldom/pull/65), [`#33`](https://github.com/xmldom/xmldom/pull/33)

## 0.3.0

[Commits](https://github.com/xmldom/xmldom/compare/0.2.1...0.3.0)

- **BREAKING** Node >=10.x is now required.
- **BREAKING** Remove `component.json` (deprecated package manager https://github.com/componentjs/guide)
- **BREAKING** Move existing sources into `lib` subdirectory.
- **POSSIBLY BREAKING** Introduce `files` entry in `package.json` and remove use of `.npmignore`.
- [Add `Document.getElementsByClassName`](https://github.com/xmldom/xmldom/issues/24).
- [Add `Node` to the list of exports](https://github.com/xmldom/xmldom/pull/27)
- [Add lowercase of åäö in `entityMap`](https://github.com/xmldom/xmldom/pull/23).
- Move CHANGELOG to markdown file.
- Move LICENSE to markdown file.

## 0.2.1

[Commits](https://github.com/xmldom/xmldom/compare/0.2.0...0.2.1)

- Correct `homepage`, `repository` and `bugs` URLs in `package.json`.

## 0.2.0

[Commits](https://github.com/xmldom/xmldom/compare/v0.1.27...0.2.0)

- Includes all **BREAKING** changes introduced in [`xmldom-alpha@v0.1.28`](#0128) by the original authors.
- **POSSIBLY BREAKING** [remove the `Object.create` check from the `_extends` method of `dom.js` that added a `__proto__` property](https://github.com/xmldom/xmldom/commit/0be2ae910a8a22c9ec2cac042e04de4c04317d2a#diff-7d1c5d97786fdf9af5446a241d0b6d56L19-L22) ().
- **POSSIBLY BREAKING** [remove code that added a `__proto__` property](https://github.com/xmldom/xmldom/commit/366159a76a181ce9a0d83f5dc48205686cfaf9cc)
- formatting/corrections in `package.json`

## 0.1.31

[Commits](https://github.com/xmldom/xmldom/compare/v0.1.27...v0.1.31)

The patch versions (`v0.1.29` - `v0.1.31`) that have been released on the [v0.1.x branch](https://github.com/xmldom/xmldom/tree/0.1.x), to reflect the changed maintainers, **are branched off from [`v0.1.27`](#0127) so they don't include the breaking changes introduced in [`xmldom-alpha@v0.1.28`](#0128)**:

## Maintainer changes

After the last commit to the original repository <https://github.com/jindw/xmldom> on the 9th of May 2017, the first commit to <https://github.com/xmldom/xmldom> is from the 19th of December 2019. [The fork has been announced in the original repository on the 2nd of March 2020.](https://github.com/jindw/xmldom/issues/259)

The versions listed below have been published to one or both of the following packages:
- <https://www.npmjs.com/package/xmldom-alpha>
- <https://www.npmjs.com/package/xmldom>

It is currently not planned to continue publishing the `xmldom-alpha` package.

The new maintainers did not invest time to understand changes that led to the last `xmldom` version [`0.1.27`](#0127) published by the original maintainer, but consider it the basis for their work.
A timeline of all the changes that happened from that version until `0.3.0` is available in <https://github.com/xmldom/xmldom/issues/62>. Any related questions should be asked there.

## 0.1.28

[Commits](https://github.com/xmldom/xmldom/compare/v0.1.27...xmldom-alpha@v0.1.28)

Published by @jindw on the 9th of May 2017 as
- `xmldom-alpha@0.1.28`

- **BREAKING** includes [regression regarding `&nbsp;` (issue #57)](https://github.com/xmldom/xmldom/issues/57)
- [Fix `license` field in `package.json`](https://github.com/jindw/xmldom/pull/178)
- [Conditional converting of HTML entities](https://github.com/jindw/xmldom/pull/80)
- Fix `dom.js` serialization issue for missing document element ([example that failed on `toString()` before this change](https://github.com/xmldom/xmldom/blob/a58dcf7a265522e80ce520fe3be0cddb1b976f6f/test/parse/unclosedcomment.js#L10-L11))
- Add new module `entities.js`

## 0.1.27

Published by @jindw on the 28th of Nov 2016 as
- `xmldom@0.1.27`
- `xmldom-alpha@0.1.27`

- Various bug fixes.

## 0.1.26

Published on the 18th of Nov 2016 as
- `xmldom@0.1.26`

- Details unknown

## 0.1.25

Published on the 18th of Nov 2016 as
- `xmldom@0.1.25`

- Details unknown

## 0.1.24

Published on the 27th of November 2016 as
- `xmldom@0.1.24`
- `xmldom-alpha@0.1.24`

- Added node filter.

## 0.1.23

Published on the 5th of May 2016 as
- `xmldom-alpha@0.1.23`

- Add namespace support for nested node serialization.
- Various other bug fixes.

## 0.1.22

- Merge XMLNS serialization.
- Remove \r from source string.
- Print namespaces for child elements.
- Switch references to nodeType to use named constants.
- Add nodelist toString support.

## 0.1.21

- Fix serialize bug.

## 0.1.20

- Optimize invalid XML support.
- Add toString sorter for attributes output.
- Add html self closed node button.
- Add `*` NS support for getElementsByTagNameNS.
- Convert attribute's value to string in setAttributeNS.
- Add support for HTML entities for HTML docs only.
- Fix TypeError when Document is created with DocumentType.

## 0.1.19

- Fix [infinite loop on unclosed comment (jindw/xmldom#68)](https://github.com/jindw/xmldom/issues/68)
- Add error report for unclosed tag.
- Various other fixes.

## 0.1.18

- Add default `ns` support.
- parseFromString now renders entirely plain text documents as textNode.
- Enable option to ignore white space on parsing.

## 0.1.17

**Details missing for this and potential earlier version**

## 0.1.16

- Correctly handle multibyte Unicode characters greater than two bytes. #57. #56.
- Initial unit testing and test coverage. #53. #46. #19.
- Create Bower `component.json` #52.

## 0.1.8

- Add: some test case from node-o3-xml (excludes xpath support)
- Fix: remove existing attribute before setting (bug introduced in v0.1.5)
- Fix: index direct access for childNodes and any NodeList collection (not w3c standard)
- Fix: remove last child bug

8
tradeCattle/aiotagro-cattle-trade/node_modules/@xmldom/xmldom/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,8 @@
Copyright 2019 - present Christopher J. Brody and other contributors, as listed in: https://github.com/xmldom/xmldom/graphs/contributors
Copyright 2012 - 2017 @jindw <jindw@xidea.org> and other contributors, as listed in: https://github.com/jindw/xmldom/graphs/contributors

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
50
tradeCattle/aiotagro-cattle-trade/node_modules/@xmldom/xmldom/SECURITY.md
generated
vendored
Normal file
@@ -0,0 +1,50 @@
# Security Policy

The most up-to-date version of this document can be found at <https://github.com/xmldom/xmldom/security/policy>.

## Supported Versions

This repository contains the code for the libraries `xmldom` and `@xmldom/xmldom` on npm.

As long as we didn't publish v1, we aim to maintain the last two minor versions with security fixes. If it is possible we provide security fixes as patch versions.
If you think there is a good reason to also patch an earlier version, let us know in a GitHub issue or the release discussion once the fix has been provided.
The maintainers will consider it, and if we agree and have/find the required resources, a patch for that version will be provided.

Please notice that [we are no longer able to publish the (unscoped) `xmldom` package](https://github.com/xmldom/xmldom/issues/271),
and that all existing versions of `xmldom` are affected by at least one security vulnerability and should be considered deprecated.
You can still report issues regarding `xmldom` as described below.

If you need help with migrating from `xmldom` to `@xmldom/xmldom`, file a GitHub issue or PR in the affected repository and mention @karfau.

## Reporting vulnerabilities

Please email reports about any security-related issues you find to `security@xmldom.org`, which will forward it to the list of maintainers.
The maintainers will try to respond within 7 calendar days. (If nobody replies after 7 days, please send us a reminder!)
As part of your communication please make sure to always hit "Reply all", so all maintainers are kept in the loop.

In addition, please include the following information along with your report:

- Your name and affiliation (if any).
- A description of the technical details of the vulnerabilities. It is very important to let us know how we can reproduce your findings.
- An explanation of who can exploit this vulnerability, and what they gain when doing so -- write an attack scenario. This will help us evaluate your report quickly, especially if the issue is complex.
- Whether this vulnerability is public or known to third parties. If it is, please provide details.

If you believe that an existing (public) issue is security-related, please email `security@xmldom.org`.
The email should include the issue URL and a short description of why it should be handled according to this security policy.

Once an issue is reported, the maintainers use the following disclosure process:

- When a report is received, we confirm the issue, determine its severity and the affected versions.
- If we know of specific third-party services or software based on xmldom that require mitigation before publication, those projects will be notified.
- A [GitHub security advisory](https://docs.github.com/en/code-security/security-advisories/about-github-security-advisories) is [created](https://docs.github.com/en/code-security/security-advisories/creating-a-security-advisory) (but not published) which details the problem and steps for mitigation.
- If the reporter provides a GitHub account and agrees to it, we [add that GitHub account as a collaborator on the advisory](https://docs.github.com/en/code-security/security-advisories/adding-a-collaborator-to-a-security-advisory).
- The vulnerability is fixed in a [private fork](https://docs.github.com/en/code-security/security-advisories/collaborating-in-a-temporary-private-fork-to-resolve-a-security-vulnerability) and potential workarounds are identified.
- The maintainers audit the existing code to find any potential similar problems.
- The release for the current minor version and the [security advisory are published](https://docs.github.com/en/code-security/security-advisories/publishing-a-security-advisory).
- The release(s) for previous minor version(s) are published.

We credit reporters for identifying security issues, if they confirm that they want to.

## Known vulnerabilities

See https://github.com/xmldom/xmldom/security/advisories?state=published
1620
tradeCattle/aiotagro-cattle-trade/node_modules/@xmldom/xmldom/index.d.ts
generated
vendored
Normal file
File diff suppressed because it is too large
3
tradeCattle/aiotagro-cattle-trade/node_modules/@xmldom/xmldom/lib/.eslintrc.yml
generated
vendored
Normal file
@@ -0,0 +1,3 @@
extends:
- 'plugin:es5/no-es2015'
- 'plugin:n/recommended'
429
tradeCattle/aiotagro-cattle-trade/node_modules/@xmldom/xmldom/lib/conventions.js
generated
vendored
Normal file
@@ -0,0 +1,429 @@
|
|||||||
|
'use strict';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Ponyfill for `Array.prototype.find` which is only available in ES6 runtimes.
|
||||||
|
*
|
||||||
|
* Works with anything that has a `length` property and index access properties,
|
||||||
|
* including NodeList.
|
||||||
|
*
|
||||||
|
* @param {T[] | { length: number; [number]: T }} list
|
||||||
|
* @param {function (item: T, index: number, list:T[]):boolean} predicate
|
||||||
|
* @param {Partial<Pick<ArrayConstructor['prototype'], 'find'>>?} ac
|
||||||
|
* Allows injecting a custom implementation in tests (`Array.prototype` by default).
|
||||||
|
* @returns {T | undefined}
|
||||||
|
* @template {unknown} T
|
||||||
|
* @see https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/find
|
||||||
|
* @see https://tc39.es/ecma262/multipage/indexed-collections.html#sec-array.prototype.find
|
||||||
|
*/
|
||||||
|
function find(list, predicate, ac) {
|
||||||
|
if (ac === undefined) {
|
||||||
|
ac = Array.prototype;
|
||||||
|
}
|
||||||
|
if (list && typeof ac.find === 'function') {
|
||||||
|
return ac.find.call(list, predicate);
|
||||||
|
}
|
||||||
|
for (var i = 0; i < list.length; i++) {
|
||||||
|
if (hasOwn(list, i)) {
|
||||||
|
var item = list[i];
|
||||||
|
if (predicate.call(undefined, item, i, list)) {
|
||||||
|
return item;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
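A minimal usage sketch of the ponyfill above (assuming the internal module can be required directly from its lib path, which is not necessarily part of the public API):

```javascript
// Hypothetical direct require of the internal helper module.
var find = require('@xmldom/xmldom/lib/conventions').find;

// Works on plain arrays ...
find([1, 2, 3], function (n) { return n > 1; }); // -> 2

// ... and on array-likes that only offer `length` plus index access, e.g. a NodeList.
var arrayLike = { length: 2, 0: 'a', 1: 'b' };
find(arrayLike, function (item) { return item === 'b'; }); // -> 'b'
```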
|
||||||
|
|
||||||
|
/**
|
||||||
|
* "Shallow freezes" an object to render it immutable.
|
||||||
|
* Uses `Object.freeze` if available,
|
||||||
|
* otherwise the immutability is only in the type.
|
||||||
|
*
|
||||||
|
* Is used to create "enum like" objects.
|
||||||
|
*
|
||||||
|
* If `Object.getOwnPropertyDescriptors` is available,
|
||||||
|
* a new object with all properties of object but without any prototype is created and returned
|
||||||
|
* after freezing it.
|
||||||
|
*
|
||||||
|
* @param {T} object
|
||||||
|
* The object to freeze.
|
||||||
|
* @param {Pick<ObjectConstructor, 'create' | 'freeze' | 'getOwnPropertyDescriptors'>} [oc=Object]
|
||||||
|
* `Object` by default,
|
||||||
|
* allows to inject custom object constructor for tests.
|
||||||
|
* @returns {Readonly<T>}
|
||||||
|
* @template {Object} T
|
||||||
|
* @see https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/freeze
|
||||||
|
* @prettierignore
|
||||||
|
*/
|
||||||
|
function freeze(object, oc) {
|
||||||
|
if (oc === undefined) {
|
||||||
|
oc = Object;
|
||||||
|
}
|
||||||
|
if (oc && typeof oc.getOwnPropertyDescriptors === 'function') {
|
||||||
|
object = oc.create(null, oc.getOwnPropertyDescriptors(object));
|
||||||
|
}
|
||||||
|
return oc && typeof oc.freeze === 'function' ? oc.freeze(object) : object;
|
||||||
|
}
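For illustration only, a small sketch of the resulting "enum like" behavior (same assumption about direct access to the internal module):

```javascript
// Hypothetical direct require of the internal helper module.
var freeze = require('@xmldom/xmldom/lib/conventions').freeze;

var LEVEL = freeze({ WARNING: 'warning', ERROR: 'error' });
LEVEL.WARNING = 'changed';           // ignored (or a TypeError in strict mode) where Object.freeze exists
console.log(LEVEL.WARNING);          // 'warning' on ES5+ runtimes
console.log(Object.isFrozen(LEVEL)); // true when Object.freeze is available
```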
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Implementation for `Object.hasOwn` but ES5 compatible.
|
||||||
|
*
|
||||||
|
* @param {any} object
|
||||||
|
* @param {string | number} key
|
||||||
|
* @returns {boolean}
|
||||||
|
*/
|
||||||
|
function hasOwn(object, key) {
|
||||||
|
return Object.prototype.hasOwnProperty.call(object, key);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Since xmldom can not rely on `Object.assign`,
|
||||||
|
* it uses/provides a simplified version that is sufficient for its needs.
|
||||||
|
*
|
||||||
|
* @param {Object} target
|
||||||
|
* @param {Object | null | undefined} source
|
||||||
|
* @returns {Object}
|
||||||
|
* The target with the merged/overridden properties.
|
||||||
|
* @throws {TypeError}
|
||||||
|
* If target is not an object.
|
||||||
|
* @see https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/assign
|
||||||
|
* @see https://tc39.es/ecma262/multipage/fundamental-objects.html#sec-object.assign
|
||||||
|
*/
|
||||||
|
function assign(target, source) {
|
||||||
|
if (target === null || typeof target !== 'object') {
|
||||||
|
throw new TypeError('target is not an object');
|
||||||
|
}
|
||||||
|
for (var key in source) {
|
||||||
|
if (hasOwn(source, key)) {
|
||||||
|
target[key] = source[key];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return target;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A number of attributes are boolean attributes.
|
||||||
|
* The presence of a boolean attribute on an element represents the `true` value,
|
||||||
|
* and the absence of the attribute represents the `false` value.
|
||||||
|
*
|
||||||
|
* If the attribute is present, its value must either be the empty string, or a value that is
|
||||||
|
* an ASCII case-insensitive match for the attribute's canonical name,
|
||||||
|
* with no leading or trailing whitespace.
|
||||||
|
*
|
||||||
|
* Note: The values `"true"` and `"false"` are not allowed on boolean attributes.
|
||||||
|
* To represent a `false` value, the attribute has to be omitted altogether.
|
||||||
|
*
|
||||||
|
* @see https://html.spec.whatwg.org/#boolean-attributes
|
||||||
|
* @see https://html.spec.whatwg.org/#attributes-3
|
||||||
|
*/
|
||||||
|
var HTML_BOOLEAN_ATTRIBUTES = freeze({
|
||||||
|
allowfullscreen: true,
|
||||||
|
async: true,
|
||||||
|
autofocus: true,
|
||||||
|
autoplay: true,
|
||||||
|
checked: true,
|
||||||
|
controls: true,
|
||||||
|
default: true,
|
||||||
|
defer: true,
|
||||||
|
disabled: true,
|
||||||
|
formnovalidate: true,
|
||||||
|
hidden: true,
|
||||||
|
ismap: true,
|
||||||
|
itemscope: true,
|
||||||
|
loop: true,
|
||||||
|
multiple: true,
|
||||||
|
muted: true,
|
||||||
|
nomodule: true,
|
||||||
|
novalidate: true,
|
||||||
|
open: true,
|
||||||
|
playsinline: true,
|
||||||
|
readonly: true,
|
||||||
|
required: true,
|
||||||
|
reversed: true,
|
||||||
|
selected: true,
|
||||||
|
});
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if `name` is matching one of the HTML boolean attribute names.
|
||||||
|
* This method doesn't check if such attributes are allowed in the context of the current
|
||||||
|
* document/parsing.
|
||||||
|
*
|
||||||
|
* @param {string} name
|
||||||
|
* @returns {boolean}
|
||||||
|
* @see {@link HTML_BOOLEAN_ATTRIBUTES}
|
||||||
|
* @see https://html.spec.whatwg.org/#boolean-attributes
|
||||||
|
* @see https://html.spec.whatwg.org/#attributes-3
|
||||||
|
*/
|
||||||
|
function isHTMLBooleanAttribute(name) {
|
||||||
|
return hasOwn(HTML_BOOLEAN_ATTRIBUTES, name.toLowerCase());
|
||||||
|
}
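A brief usage sketch (same caveat: requiring lib/conventions.js directly is an assumption, not a documented entry point):

```javascript
var conventions = require('@xmldom/xmldom/lib/conventions');

conventions.isHTMLBooleanAttribute('disabled'); // true
conventions.isHTMLBooleanAttribute('DISABLED'); // true, matching is case-insensitive
conventions.isHTMLBooleanAttribute('value');    // false, a regular attribute
```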
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Void elements only have a start tag; end tags must not be specified for void elements.
|
||||||
|
* These elements should be written as self-closing like this: `<area />`.
|
||||||
|
* This should not be confused with optional tags that HTML allows to omit the end tag for
|
||||||
|
* (like `li`, `tr` and others), which can have content after them,
|
||||||
|
* so they can not be written as self-closing.
|
||||||
|
* xmldom does not have any logic for optional end tags cases,
|
||||||
|
* and will report them as a warning.
|
||||||
|
* Content that would go into the unopened element,
|
||||||
|
* will instead be added as a sibling text node.
|
||||||
|
*
|
||||||
|
* @type {Readonly<{
|
||||||
|
* area: boolean;
|
||||||
|
* col: boolean;
|
||||||
|
* img: boolean;
|
||||||
|
* wbr: boolean;
|
||||||
|
* link: boolean;
|
||||||
|
* hr: boolean;
|
||||||
|
* source: boolean;
|
||||||
|
* br: boolean;
|
||||||
|
* input: boolean;
|
||||||
|
* param: boolean;
|
||||||
|
* meta: boolean;
|
||||||
|
* embed: boolean;
|
||||||
|
* track: boolean;
|
||||||
|
* base: boolean;
|
||||||
|
* }>}
|
||||||
|
* @see https://html.spec.whatwg.org/#void-elements
|
||||||
|
* @see https://html.spec.whatwg.org/#optional-tags
|
||||||
|
*/
|
||||||
|
var HTML_VOID_ELEMENTS = freeze({
|
||||||
|
area: true,
|
||||||
|
base: true,
|
||||||
|
br: true,
|
||||||
|
col: true,
|
||||||
|
embed: true,
|
||||||
|
hr: true,
|
||||||
|
img: true,
|
||||||
|
input: true,
|
||||||
|
link: true,
|
||||||
|
meta: true,
|
||||||
|
param: true,
|
||||||
|
source: true,
|
||||||
|
track: true,
|
||||||
|
wbr: true,
|
||||||
|
});
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if `tagName` is matching one of the HTML void element names.
|
||||||
|
* This method doesn't check if such tags are allowed in the context of the current
|
||||||
|
* document/parsing.
|
||||||
|
*
|
||||||
|
* @param {string} tagName
|
||||||
|
* @returns {boolean}
|
||||||
|
* @see {@link HTML_VOID_ELEMENTS}
|
||||||
|
* @see https://html.spec.whatwg.org/#void-elements
|
||||||
|
*/
|
||||||
|
function isHTMLVoidElement(tagName) {
|
||||||
|
return hasOwn(HTML_VOID_ELEMENTS, tagName.toLowerCase());
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Tag names that are raw text elements according to HTML spec.
|
||||||
|
* The value denotes whether they are escapable or not.
|
||||||
|
*
|
||||||
|
* @see {@link isHTMLEscapableRawTextElement}
|
||||||
|
* @see {@link isHTMLRawTextElement}
|
||||||
|
* @see https://html.spec.whatwg.org/#raw-text-elements
|
||||||
|
* @see https://html.spec.whatwg.org/#escapable-raw-text-elements
|
||||||
|
*/
|
||||||
|
var HTML_RAW_TEXT_ELEMENTS = freeze({
|
||||||
|
script: false,
|
||||||
|
style: false,
|
||||||
|
textarea: true,
|
||||||
|
title: true,
|
||||||
|
});
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if `tagName` is matching one of the HTML raw text element names.
|
||||||
|
* This method doesn't check if such tags are allowed in the context of the current
|
||||||
|
* document/parsing.
|
||||||
|
*
|
||||||
|
* @param {string} tagName
|
||||||
|
* @returns {boolean}
|
||||||
|
* @see {@link isHTMLEscapableRawTextElement}
|
||||||
|
* @see {@link HTML_RAW_TEXT_ELEMENTS}
|
||||||
|
* @see https://html.spec.whatwg.org/#raw-text-elements
|
||||||
|
* @see https://html.spec.whatwg.org/#escapable-raw-text-elements
|
||||||
|
*/
|
||||||
|
function isHTMLRawTextElement(tagName) {
|
||||||
|
var key = tagName.toLowerCase();
|
||||||
|
return hasOwn(HTML_RAW_TEXT_ELEMENTS, key) && !HTML_RAW_TEXT_ELEMENTS[key];
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Check if `tagName` is matching one of the HTML escapable raw text element names.
|
||||||
|
* This method doesn't check if such tags are allowed in the context of the current
|
||||||
|
* document/parsing.
|
||||||
|
*
|
||||||
|
* @param {string} tagName
|
||||||
|
* @returns {boolean}
|
||||||
|
* @see {@link isHTMLRawTextElement}
|
||||||
|
* @see {@link HTML_RAW_TEXT_ELEMENTS}
|
||||||
|
* @see https://html.spec.whatwg.org/#raw-text-elements
|
||||||
|
* @see https://html.spec.whatwg.org/#escapable-raw-text-elements
|
||||||
|
*/
|
||||||
|
function isHTMLEscapableRawTextElement(tagName) {
|
||||||
|
var key = tagName.toLowerCase();
|
||||||
|
return hasOwn(HTML_RAW_TEXT_ELEMENTS, key) && HTML_RAW_TEXT_ELEMENTS[key];
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Only returns true if `value` matches MIME_TYPE.HTML, which indicates an HTML document.
|
||||||
|
*
|
||||||
|
* @param {string} mimeType
|
||||||
|
* @returns {mimeType is 'text/html'}
|
||||||
|
* @see https://www.iana.org/assignments/media-types/text/html
|
||||||
|
* @see https://en.wikipedia.org/wiki/HTML
|
||||||
|
* @see https://developer.mozilla.org/en-US/docs/Web/API/DOMParser/parseFromString
|
||||||
|
* @see https://html.spec.whatwg.org/multipage/dynamic-markup-insertion.html#dom-domparser-parsefromstring
|
||||||
|
*/
|
||||||
|
function isHTMLMimeType(mimeType) {
|
||||||
|
return mimeType === MIME_TYPE.HTML;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* For both the `text/html` and the `application/xhtml+xml` namespace the spec defines that the
|
||||||
|
* HTML namespace is provided as the default.
|
||||||
|
*
|
||||||
|
* @param {string} mimeType
|
||||||
|
* @returns {boolean}
|
||||||
|
* @see https://dom.spec.whatwg.org/#dom-document-createelement
|
||||||
|
* @see https://dom.spec.whatwg.org/#dom-domimplementation-createdocument
|
||||||
|
* @see https://dom.spec.whatwg.org/#dom-domimplementation-createhtmldocument
|
||||||
|
*/
|
||||||
|
function hasDefaultHTMLNamespace(mimeType) {
|
||||||
|
return isHTMLMimeType(mimeType) || mimeType === MIME_TYPE.XML_XHTML_APPLICATION;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* All mime types that are allowed as input to `DOMParser.parseFromString`
|
||||||
|
*
|
||||||
|
* @see https://developer.mozilla.org/en-US/docs/Web/API/DOMParser/parseFromString#Argument02
|
||||||
|
* MDN
|
||||||
|
* @see https://html.spec.whatwg.org/multipage/dynamic-markup-insertion.html#domparsersupportedtype
|
||||||
|
* WHATWG HTML Spec
|
||||||
|
* @see {@link DOMParser.prototype.parseFromString}
|
||||||
|
*/
|
||||||
|
var MIME_TYPE = freeze({
|
||||||
|
/**
|
||||||
|
* `text/html`, the only mime type that triggers treating an XML document as HTML.
|
||||||
|
*
|
||||||
|
* @see https://www.iana.org/assignments/media-types/text/html IANA MimeType registration
|
||||||
|
* @see https://en.wikipedia.org/wiki/HTML Wikipedia
|
||||||
|
* @see https://developer.mozilla.org/en-US/docs/Web/API/DOMParser/parseFromString MDN
|
||||||
|
* @see https://html.spec.whatwg.org/multipage/dynamic-markup-insertion.html#dom-domparser-parsefromstring
|
||||||
|
* WHATWG HTML Spec
|
||||||
|
*/
|
||||||
|
HTML: 'text/html',
|
||||||
|
|
||||||
|
/**
|
||||||
|
* `application/xml`, the standard mime type for XML documents.
|
||||||
|
*
|
||||||
|
* @see https://www.iana.org/assignments/media-types/application/xml IANA MimeType
|
||||||
|
* registration
|
||||||
|
* @see https://tools.ietf.org/html/rfc7303#section-9.1 RFC 7303
|
||||||
|
* @see https://en.wikipedia.org/wiki/XML_and_MIME Wikipedia
|
||||||
|
*/
|
||||||
|
XML_APPLICATION: 'application/xml',
|
||||||
|
|
||||||
|
/**
|
||||||
|
* `text/xml`, an alias for `application/xml`.
|
||||||
|
*
|
||||||
|
* @see https://tools.ietf.org/html/rfc7303#section-9.2 RFC 7303
|
||||||
|
* @see https://www.iana.org/assignments/media-types/text/xml IANA MimeType registration
|
||||||
|
* @see https://en.wikipedia.org/wiki/XML_and_MIME Wikipedia
|
||||||
|
*/
|
||||||
|
XML_TEXT: 'text/xml',
|
||||||
|
|
||||||
|
/**
|
||||||
|
* `application/xhtml+xml`, indicates an XML document that has the default HTML namespace,
|
||||||
|
* but is parsed as an XML document.
|
||||||
|
*
|
||||||
|
* @see https://www.iana.org/assignments/media-types/application/xhtml+xml IANA MimeType
|
||||||
|
* registration
|
||||||
|
* @see https://dom.spec.whatwg.org/#dom-domimplementation-createdocument WHATWG DOM Spec
|
||||||
|
* @see https://en.wikipedia.org/wiki/XHTML Wikipedia
|
||||||
|
*/
|
||||||
|
XML_XHTML_APPLICATION: 'application/xhtml+xml',
|
||||||
|
|
||||||
|
/**
|
||||||
|
* `image/svg+xml`,
|
||||||
|
*
|
||||||
|
* @see https://www.iana.org/assignments/media-types/image/svg+xml IANA MimeType registration
|
||||||
|
* @see https://www.w3.org/TR/SVG11/ W3C SVG 1.1
|
||||||
|
* @see https://en.wikipedia.org/wiki/Scalable_Vector_Graphics Wikipedia
|
||||||
|
*/
|
||||||
|
XML_SVG_IMAGE: 'image/svg+xml',
|
||||||
|
});
|
||||||
|
/**
|
||||||
|
* @typedef {'application/xhtml+xml' | 'application/xml' | 'image/svg+xml' | 'text/html' | 'text/xml'}
|
||||||
|
* MimeType
|
||||||
|
*/
|
||||||
|
/**
|
||||||
|
* @type {MimeType[]}
|
||||||
|
* @private
|
||||||
|
* Basically `Object.values`, which is not available in ES5.
|
||||||
|
*/
|
||||||
|
var _MIME_TYPES = Object.keys(MIME_TYPE).map(function (key) {
|
||||||
|
return MIME_TYPE[key];
|
||||||
|
});
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Only returns true if `mimeType` is one of the allowed values for
|
||||||
|
* `DOMParser.parseFromString`.
|
||||||
|
*
|
||||||
|
* @param {string} mimeType
|
||||||
|
* @returns {mimeType is 'application/xhtml+xml' | 'application/xml' | 'image/svg+xml' | 'text/html' | 'text/xml'}
|
||||||
|
*
|
||||||
|
*/
|
||||||
|
function isValidMimeType(mimeType) {
|
||||||
|
return _MIME_TYPES.indexOf(mimeType) > -1;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Namespaces that are used in this code base.
|
||||||
|
*
|
||||||
|
* @see http://www.w3.org/TR/REC-xml-names
|
||||||
|
*/
|
||||||
|
var NAMESPACE = freeze({
|
||||||
|
/**
|
||||||
|
* The XHTML namespace.
|
||||||
|
*
|
||||||
|
* @see http://www.w3.org/1999/xhtml
|
||||||
|
*/
|
||||||
|
HTML: 'http://www.w3.org/1999/xhtml',
|
||||||
|
|
||||||
|
/**
|
||||||
|
* The SVG namespace.
|
||||||
|
*
|
||||||
|
* @see http://www.w3.org/2000/svg
|
||||||
|
*/
|
||||||
|
SVG: 'http://www.w3.org/2000/svg',
|
||||||
|
|
||||||
|
/**
|
||||||
|
* The `xml:` namespace.
|
||||||
|
*
|
||||||
|
* @see http://www.w3.org/XML/1998/namespace
|
||||||
|
*/
|
||||||
|
XML: 'http://www.w3.org/XML/1998/namespace',
|
||||||
|
|
||||||
|
/**
|
||||||
|
* The `xmlns:` namespace.
|
||||||
|
*
|
||||||
|
* @see https://www.w3.org/2000/xmlns/
|
||||||
|
*/
|
||||||
|
XMLNS: 'http://www.w3.org/2000/xmlns/',
|
||||||
|
});
|
||||||
|
|
||||||
|
exports.assign = assign;
|
||||||
|
exports.find = find;
|
||||||
|
exports.freeze = freeze;
|
||||||
|
exports.HTML_BOOLEAN_ATTRIBUTES = HTML_BOOLEAN_ATTRIBUTES;
|
||||||
|
exports.HTML_RAW_TEXT_ELEMENTS = HTML_RAW_TEXT_ELEMENTS;
|
||||||
|
exports.HTML_VOID_ELEMENTS = HTML_VOID_ELEMENTS;
|
||||||
|
exports.hasDefaultHTMLNamespace = hasDefaultHTMLNamespace;
|
||||||
|
exports.hasOwn = hasOwn;
|
||||||
|
exports.isHTMLBooleanAttribute = isHTMLBooleanAttribute;
|
||||||
|
exports.isHTMLRawTextElement = isHTMLRawTextElement;
|
||||||
|
exports.isHTMLEscapableRawTextElement = isHTMLEscapableRawTextElement;
|
||||||
|
exports.isHTMLMimeType = isHTMLMimeType;
|
||||||
|
exports.isHTMLVoidElement = isHTMLVoidElement;
|
||||||
|
exports.isValidMimeType = isValidMimeType;
|
||||||
|
exports.MIME_TYPE = MIME_TYPE;
|
||||||
|
exports.NAMESPACE = NAMESPACE;
|
||||||
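Taken together, the exports above can be exercised roughly like this (a sketch; the deep require path is an assumption and may be blocked by the package's `exports` map in some versions):

```javascript
var conventions = require('@xmldom/xmldom/lib/conventions');

conventions.isValidMimeType('text/html');                     // true
conventions.isValidMimeType('application/json');              // false
conventions.hasDefaultHTMLNamespace('application/xhtml+xml'); // true
console.log(conventions.NAMESPACE.HTML);                      // 'http://www.w3.org/1999/xhtml'
```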
586
tradeCattle/aiotagro-cattle-trade/node_modules/@xmldom/xmldom/lib/dom-parser.js
generated
vendored
Normal file
@@ -0,0 +1,586 @@
|
|||||||
|
'use strict';
|
||||||
|
|
||||||
|
var conventions = require('./conventions');
|
||||||
|
var dom = require('./dom');
|
||||||
|
var errors = require('./errors');
|
||||||
|
var entities = require('./entities');
|
||||||
|
var sax = require('./sax');
|
||||||
|
|
||||||
|
var DOMImplementation = dom.DOMImplementation;
|
||||||
|
|
||||||
|
var hasDefaultHTMLNamespace = conventions.hasDefaultHTMLNamespace;
|
||||||
|
var isHTMLMimeType = conventions.isHTMLMimeType;
|
||||||
|
var isValidMimeType = conventions.isValidMimeType;
|
||||||
|
var MIME_TYPE = conventions.MIME_TYPE;
|
||||||
|
var NAMESPACE = conventions.NAMESPACE;
|
||||||
|
var ParseError = errors.ParseError;
|
||||||
|
|
||||||
|
var XMLReader = sax.XMLReader;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Normalizes line ending according to <https://www.w3.org/TR/xml11/#sec-line-ends>,
|
||||||
|
* including some Unicode "newline" characters:
|
||||||
|
*
|
||||||
|
* > XML parsed entities are often stored in computer files which,
|
||||||
|
* > for editing convenience, are organized into lines.
|
||||||
|
* > These lines are typically separated by some combination
|
||||||
|
* > of the characters CARRIAGE RETURN (#xD) and LINE FEED (#xA).
|
||||||
|
* >
|
||||||
|
* > To simplify the tasks of applications, the XML processor must behave
|
||||||
|
* > as if it normalized all line breaks in external parsed entities (including the document entity)
|
||||||
|
* > on input, before parsing, by translating the following to a single #xA character:
|
||||||
|
* >
|
||||||
|
* > 1. the two-character sequence #xD #xA,
|
||||||
|
* > 2. the two-character sequence #xD #x85,
|
||||||
|
* > 3. the single character #x85,
|
||||||
|
* > 4. the single character #x2028,
|
||||||
|
* > 5. the single character #x2029,
|
||||||
|
* > 6. any #xD character that is not immediately followed by #xA or #x85.
|
||||||
|
*
|
||||||
|
* @param {string} input
|
||||||
|
* @returns {string}
|
||||||
|
* @prettierignore
|
||||||
|
*/
|
||||||
|
function normalizeLineEndings(input) {
|
||||||
|
return input.replace(/\r[\n\u0085]/g, '\n').replace(/[\r\u0085\u2028\u2029]/g, '\n');
|
||||||
|
}
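A quick sketch of what this normalization does to mixed line endings (whether `normalizeLineEndings` is exported from this module is not visible in the truncated listing here, so the require below is an assumption):

```javascript
// Hypothetical direct require; alternatively copy the two-step replace shown above.
var normalizeLineEndings = require('@xmldom/xmldom/lib/dom-parser').normalizeLineEndings;

// CRLF, lone CR, NEL (U+0085) and the Unicode line/paragraph separators all become '\n'.
normalizeLineEndings('a\r\nb\rc\u0085d\u2028e'); // -> 'a\nb\nc\nd\ne'
```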
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @typedef Locator
|
||||||
|
* @property {number} [columnNumber]
|
||||||
|
* @property {number} [lineNumber]
|
||||||
|
*/
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @typedef DOMParserOptions
|
||||||
|
* @property {typeof assign} [assign]
|
||||||
|
* The method to use instead of `conventions.assign`, which is used to copy values from
|
||||||
|
* `options` before they are used for parsing.
|
||||||
|
* @property {typeof DOMHandler} [domHandler]
|
||||||
|
* For internal testing: The class for creating an instance for handling events from the SAX
|
||||||
|
* parser.
|
||||||
|
* *****Warning: By configuring a faulty implementation, the specified behavior can completely
|
||||||
|
* be broken.*****.
|
||||||
|
* @property {Function} [errorHandler]
|
||||||
|
* DEPRECATED! use `onError` instead.
|
||||||
|
* @property {function(level:ErrorLevel, message:string, context: DOMHandler):void}
|
||||||
|
* [onError]
|
||||||
|
* A function invoked for every error that occurs during parsing.
|
||||||
|
*
|
||||||
|
* If it is not provided, all errors are reported to `console.error`
|
||||||
|
* and only `fatalError`s are thrown as a `ParseError`,
|
||||||
|
* which prevents any further processing.
|
||||||
|
* If the provided method throws, a `ParserError` is thrown,
|
||||||
|
* which prevents any further processing.
|
||||||
|
*
|
||||||
|
* Be aware that many `warning`s are considered an error that prevents further processing in
|
||||||
|
* most implementations.
|
||||||
|
* @property {boolean} [locator=true]
|
||||||
|
* Configures if the nodes created during parsing will have a `lineNumber` and a `columnNumber`
|
||||||
|
* attribute describing their location in the XML string.
|
||||||
|
* Default is true.
|
||||||
|
* @property {(string) => string} [normalizeLineEndings]
|
||||||
|
* used to replace line endings before parsing, defaults to exported `normalizeLineEndings`,
|
||||||
|
* which normalizes line endings according to <https://www.w3.org/TR/xml11/#sec-line-ends>,
|
||||||
|
* including some Unicode "newline" characters.
|
||||||
|
* @property {Object} [xmlns]
|
||||||
|
* The XML namespaces that should be assumed when parsing.
|
||||||
|
* The default namespace can be provided by the key that is the empty string.
|
||||||
|
* When the `mimeType` for HTML, XHTML or SVG are passed to `parseFromString`,
|
||||||
|
* the default namespace that will be used,
|
||||||
|
* will be overridden according to the specification.
|
||||||
|
* @see {@link normalizeLineEndings}
|
||||||
|
*/
|
||||||
|
|
||||||
|
/**
|
||||||
|
* The DOMParser interface provides the ability to parse XML or HTML source code from a string
|
||||||
|
* into a DOM `Document`.
|
||||||
|
*
|
||||||
|
* ***xmldom is different from the spec in that it allows an `options` parameter,
|
||||||
|
* to control the behavior***.
|
||||||
|
*
|
||||||
|
* @class
|
||||||
|
* @param {DOMParserOptions} [options]
|
||||||
|
* @see https://developer.mozilla.org/en-US/docs/Web/API/DOMParser
|
||||||
|
* @see https://html.spec.whatwg.org/multipage/dynamic-markup-insertion.html#dom-parsing-and-serialization
|
||||||
|
*/
|
||||||
|
function DOMParser(options) {
|
||||||
|
options = options || {};
|
||||||
|
if (options.locator === undefined) {
|
||||||
|
options.locator = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* The method to use instead of `conventions.assign`, which is used to copy values from
|
||||||
|
* `options`
|
||||||
|
* before they are used for parsing.
|
||||||
|
*
|
||||||
|
* @type {conventions.assign}
|
||||||
|
* @private
|
||||||
|
* @see {@link conventions.assign}
|
||||||
|
* @readonly
|
||||||
|
*/
|
||||||
|
this.assign = options.assign || conventions.assign;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* For internal testing: The class for creating an instance for handling events from the SAX
|
||||||
|
* parser.
|
||||||
|
* *****Warning: By configuring a faulty implementation, the specified behavior can completely
|
||||||
|
* be broken*****.
|
||||||
|
*
|
||||||
|
* @type {typeof DOMHandler}
|
||||||
|
* @private
|
||||||
|
* @readonly
|
||||||
|
*/
|
||||||
|
this.domHandler = options.domHandler || DOMHandler;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A function that is invoked for every error that occurs during parsing.
|
||||||
|
*
|
||||||
|
* If it is not provided, all errors are reported to `console.error`
|
||||||
|
* and only `fatalError`s are thrown as a `ParseError`,
|
||||||
|
* which prevents any further processing.
|
||||||
|
* If the provided method throws, a `ParserError` is thrown,
|
||||||
|
* which prevents any further processing.
|
||||||
|
*
|
||||||
|
* Be aware that many `warning`s are considered an error that prevents further processing in
|
||||||
|
* most implementations.
|
||||||
|
*
|
||||||
|
* @type {function(level:ErrorLevel, message:string, context: DOMHandler):void}
|
||||||
|
* @see {@link onErrorStopParsing}
|
||||||
|
* @see {@link onWarningStopParsing}
|
||||||
|
*/
|
||||||
|
this.onError = options.onError || options.errorHandler;
|
||||||
|
if (options.errorHandler && typeof options.errorHandler !== 'function') {
|
||||||
|
throw new TypeError('errorHandler object is no longer supported, switch to onError!');
|
||||||
|
} else if (options.errorHandler) {
|
||||||
|
options.errorHandler('warning', 'The `errorHandler` option has been deprecated, use `onError` instead!', this);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* used to replace line endings before parsing, defaults to `normalizeLineEndings`
|
||||||
|
*
|
||||||
|
* @type {(string) => string}
|
||||||
|
* @readonly
|
||||||
|
*/
|
||||||
|
this.normalizeLineEndings = options.normalizeLineEndings || normalizeLineEndings;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Configures if the nodes created during parsing will have a `lineNumber` and a
|
||||||
|
* `columnNumber`
|
||||||
|
* attribute describing their location in the XML string.
|
||||||
|
* Default is true.
|
||||||
|
*
|
||||||
|
* @type {boolean}
|
||||||
|
* @readonly
|
||||||
|
*/
|
||||||
|
this.locator = !!options.locator;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* The default namespace can be provided by the key that is the empty string.
|
||||||
|
* When the `mimeType` for HTML, XHTML or SVG are passed to `parseFromString`,
|
||||||
|
* the default namespace that will be used,
|
||||||
|
* will be overridden according to the specification.
|
||||||
|
*
|
||||||
|
* @type {Readonly<Object>}
|
||||||
|
* @readonly
|
||||||
|
*/
|
||||||
|
this.xmlns = this.assign(Object.create(null), options.xmlns);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parses `source` using the options in the way configured by the `DOMParserOptions` of `this`
|
||||||
|
* `DOMParser`. If `mimeType` is `text/html` an HTML `Document` is created,
|
||||||
|
* otherwise an XML `Document` is created.
|
||||||
|
*
|
||||||
|
* __It behaves different from the description in the living standard__:
|
||||||
|
* - Uses the `options` passed to the `DOMParser` constructor to modify the behavior.
|
||||||
|
* - Any unexpected input is reported to `onError` with either a `warning`,
|
||||||
|
* `error` or `fatalError` level.
|
||||||
|
* - Any `fatalError` throws a `ParseError` which prevents further processing.
|
||||||
|
* - Any error thrown by `onError` is converted to a `ParseError` which prevents further
|
||||||
|
* processing - If no `Document` was created during parsing it is reported as a `fatalError`.
|
||||||
|
* *****Warning: By configuring a faulty DOMHandler implementation,
|
||||||
|
* the specified behavior can completely be broken*****.
|
||||||
|
*
|
||||||
|
* @param {string} source
|
||||||
|
* The XML mime type only allows string input!
|
||||||
|
* @param {string} [mimeType='application/xml']
|
||||||
|
* the mimeType or contentType of the document to be created determines the `type` of document
|
||||||
|
* created (XML or HTML)
|
||||||
|
* @returns {Document}
|
||||||
|
* The `Document` node.
|
||||||
|
* @throws {ParseError}
|
||||||
|
* for any `fatalError` or anything that is thrown by `onError`
|
||||||
|
* @throws {TypeError}
|
||||||
|
* for any invalid `mimeType`
|
||||||
|
* @see https://developer.mozilla.org/en-US/docs/Web/API/DOMParser/parseFromString
|
||||||
|
* @see https://html.spec.whatwg.org/#dom-domparser-parsefromstring-dev
|
||||||
|
*/
|
||||||
|
DOMParser.prototype.parseFromString = function (source, mimeType) {
|
||||||
|
if (!isValidMimeType(mimeType)) {
|
||||||
|
throw new TypeError('DOMParser.parseFromString: the provided mimeType "' + mimeType + '" is not valid.');
|
||||||
|
}
|
||||||
|
var defaultNSMap = this.assign(Object.create(null), this.xmlns);
|
||||||
|
var entityMap = entities.XML_ENTITIES;
|
||||||
|
var defaultNamespace = defaultNSMap[''] || null;
|
||||||
|
if (hasDefaultHTMLNamespace(mimeType)) {
|
||||||
|
entityMap = entities.HTML_ENTITIES;
|
||||||
|
defaultNamespace = NAMESPACE.HTML;
|
||||||
|
} else if (mimeType === MIME_TYPE.XML_SVG_IMAGE) {
|
||||||
|
defaultNamespace = NAMESPACE.SVG;
|
||||||
|
}
|
||||||
|
defaultNSMap[''] = defaultNamespace;
|
||||||
|
defaultNSMap.xml = defaultNSMap.xml || NAMESPACE.XML;
|
||||||
|
|
||||||
|
var domBuilder = new this.domHandler({
|
||||||
|
mimeType: mimeType,
|
||||||
|
defaultNamespace: defaultNamespace,
|
||||||
|
onError: this.onError,
|
||||||
|
});
|
||||||
|
var locator = this.locator ? {} : undefined;
|
||||||
|
if (this.locator) {
|
||||||
|
domBuilder.setDocumentLocator(locator);
|
||||||
|
}
|
||||||
|
|
||||||
|
var sax = new XMLReader();
|
||||||
|
sax.errorHandler = domBuilder;
|
||||||
|
sax.domBuilder = domBuilder;
|
||||||
|
var isXml = !conventions.isHTMLMimeType(mimeType);
|
||||||
|
if (isXml && typeof source !== 'string') {
|
||||||
|
sax.errorHandler.fatalError('source is not a string');
|
||||||
|
}
|
||||||
|
sax.parse(this.normalizeLineEndings(String(source)), defaultNSMap, entityMap);
|
||||||
|
if (!domBuilder.doc.documentElement) {
|
||||||
|
sax.errorHandler.fatalError('missing root element');
|
||||||
|
}
|
||||||
|
return domBuilder.doc;
|
||||||
|
};
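A minimal usage sketch of the parser wired up above; the `onError` callback shape mirrors the `DOMParserOptions` documented earlier in this file (exact levels and messages are implementation details):

```javascript
var DOMParser = require('@xmldom/xmldom').DOMParser;

var parser = new DOMParser({
  // Collect non-fatal problems instead of having them logged to console.error.
  onError: function (level, message) { console.warn('[xmldom ' + level + ']', message); },
});

var doc = parser.parseFromString('<root><child/></root>', 'text/xml');
console.log(doc.documentElement.tagName); // 'root'

// An invalid mimeType is rejected up front:
// parser.parseFromString('<root/>', 'application/json'); // throws TypeError
```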
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @typedef DOMHandlerOptions
|
||||||
|
* @property {string} [mimeType=MIME_TYPE.XML_APPLICATION]
|
||||||
|
* @property {string | null} [defaultNamespace=null]
|
||||||
|
*/
|
||||||
|
/**
|
||||||
|
* The class that is used to handle events from the SAX parser to create the related DOM
|
||||||
|
* elements.
|
||||||
|
*
|
||||||
|
* Some methods are only implemented as an empty function,
|
||||||
|
* since they are (at least currently) not relevant for xmldom.
|
||||||
|
*
|
||||||
|
* @class
|
||||||
|
* @param {DOMHandlerOptions} [options]
|
||||||
|
* @see http://www.saxproject.org/apidoc/org/xml/sax/ext/DefaultHandler2.html
|
||||||
|
*/
|
||||||
|
function DOMHandler(options) {
|
||||||
|
var opt = options || {};
|
||||||
|
/**
|
||||||
|
* The mime type is used to determine if the DOM handler will create an XML or HTML document.
|
||||||
|
* Only if it is set to `text/html` it will create an HTML document.
|
||||||
|
* It defaults to MIME_TYPE.XML_APPLICATION.
|
||||||
|
*
|
||||||
|
* @type {string}
|
||||||
|
* @see {@link MIME_TYPE}
|
||||||
|
* @readonly
|
||||||
|
*/
|
||||||
|
this.mimeType = opt.mimeType || MIME_TYPE.XML_APPLICATION;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* The namespace to use to create an XML document.
|
||||||
|
* For the following reasons this is required:
|
||||||
|
* - The SAX API for `startDocument` doesn't offer any way to pass a namespace,
|
||||||
|
* since at that point there is no way for the parser to know what the default namespace from
|
||||||
|
* the document will be.
|
||||||
|
* - When creating using `DOMImplementation.createDocument` it is required to pass a
|
||||||
|
* namespace,
|
||||||
|
* to determine the correct `Document.contentType`, which should match `this.mimeType`.
|
||||||
|
* - When parsing an XML document with the `application/xhtml+xml` mimeType,
|
||||||
|
* the HTML namespace needs to be the default namespace.
|
||||||
|
*
|
||||||
|
* @type {string | null}
|
||||||
|
* @private
|
||||||
|
* @readonly
|
||||||
|
*/
|
||||||
|
this.defaultNamespace = opt.defaultNamespace || null;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @type {boolean}
|
||||||
|
* @private
|
||||||
|
*/
|
||||||
|
this.cdata = false;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* The last `Element` that was created by `startElement`.
|
||||||
|
* `endElement` sets it to the `currentElement.parentNode`.
|
||||||
|
*
|
||||||
|
* Note: The sax parser currently sets it to white space text nodes between tags.
|
||||||
|
*
|
||||||
|
* @type {Element | Node | undefined}
|
||||||
|
* @private
|
||||||
|
*/
|
||||||
|
this.currentElement = undefined;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* The Document that is created as part of `startDocument`,
|
||||||
|
* and returned by `DOMParser.parseFromString`.
|
||||||
|
*
|
||||||
|
* @type {Document | undefined}
|
||||||
|
* @readonly
|
||||||
|
*/
|
||||||
|
this.doc = undefined;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* The locator is stored as part of setDocumentLocator.
|
||||||
|
* It is controlled and mutated by the SAX parser to store the current parsing position.
|
||||||
|
* It is used by DOMHandler to set `columnNumber` and `lineNumber`
|
||||||
|
* on the DOM nodes.
|
||||||
|
*
|
||||||
|
* @type {Readonly<Locator> | undefined}
|
||||||
|
* @private
|
||||||
|
* @readonly (the
|
||||||
|
* sax parser currently sometimes sets it)
|
||||||
|
*/
|
||||||
|
this.locator = undefined;
|
||||||
|
/**
|
||||||
|
* @type {function (level:ErrorLevel ,message:string, context:DOMHandler):void}
|
||||||
|
* @readonly
|
||||||
|
*/
|
||||||
|
this.onError = opt.onError;
|
||||||
|
}
|
||||||
|
|
||||||
|
function position(locator, node) {
|
||||||
|
node.lineNumber = locator.lineNumber;
|
||||||
|
node.columnNumber = locator.columnNumber;
|
||||||
|
}
|
||||||
|
|
||||||
|
DOMHandler.prototype = {
|
||||||
|
/**
|
||||||
|
* Either creates an XML or an HTML document and stores it under `this.doc`.
|
||||||
|
* If it is an XML document, `this.defaultNamespace` is used to create it,
|
||||||
|
* and it will not contain any `childNodes`.
|
||||||
|
* If it is an HTML document, it will be created without any `childNodes`.
|
||||||
|
*
|
||||||
|
* @see http://www.saxproject.org/apidoc/org/xml/sax/ContentHandler.html
|
||||||
|
*/
|
||||||
|
startDocument: function () {
|
||||||
|
var impl = new DOMImplementation();
|
||||||
|
this.doc = isHTMLMimeType(this.mimeType) ? impl.createHTMLDocument(false) : impl.createDocument(this.defaultNamespace, '');
|
||||||
|
},
|
||||||
|
startElement: function (namespaceURI, localName, qName, attrs) {
|
||||||
|
var doc = this.doc;
|
||||||
|
var el = doc.createElementNS(namespaceURI, qName || localName);
|
||||||
|
var len = attrs.length;
|
||||||
|
appendElement(this, el);
|
||||||
|
this.currentElement = el;
|
||||||
|
|
||||||
|
this.locator && position(this.locator, el);
|
||||||
|
for (var i = 0; i < len; i++) {
|
||||||
|
var namespaceURI = attrs.getURI(i);
|
||||||
|
var value = attrs.getValue(i);
|
||||||
|
var qName = attrs.getQName(i);
|
||||||
|
var attr = doc.createAttributeNS(namespaceURI, qName);
|
||||||
|
this.locator && position(attrs.getLocator(i), attr);
|
||||||
|
attr.value = attr.nodeValue = value;
|
||||||
|
el.setAttributeNode(attr);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
endElement: function (namespaceURI, localName, qName) {
|
||||||
|
		this.currentElement = this.currentElement.parentNode;
	},
	startPrefixMapping: function (prefix, uri) {},
	endPrefixMapping: function (prefix) {},
	processingInstruction: function (target, data) {
		var ins = this.doc.createProcessingInstruction(target, data);
		this.locator && position(this.locator, ins);
		appendElement(this, ins);
	},
	ignorableWhitespace: function (ch, start, length) {},
	characters: function (chars, start, length) {
		chars = _toString.apply(this, arguments);
		//console.log(chars)
		if (chars) {
			if (this.cdata) {
				var charNode = this.doc.createCDATASection(chars);
			} else {
				var charNode = this.doc.createTextNode(chars);
			}
			if (this.currentElement) {
				this.currentElement.appendChild(charNode);
			} else if (/^\s*$/.test(chars)) {
				this.doc.appendChild(charNode);
				//process xml
			}
			this.locator && position(this.locator, charNode);
		}
	},
	skippedEntity: function (name) {},
	endDocument: function () {
		this.doc.normalize();
	},
	/**
	 * Stores the locator to be able to set the `columnNumber` and `lineNumber`
	 * on the created DOM nodes.
	 *
	 * @param {Locator} locator
	 */
	setDocumentLocator: function (locator) {
		if (locator) {
			locator.lineNumber = 0;
		}
		this.locator = locator;
	},
	//LexicalHandler
	comment: function (chars, start, length) {
		chars = _toString.apply(this, arguments);
		var comm = this.doc.createComment(chars);
		this.locator && position(this.locator, comm);
		appendElement(this, comm);
	},

	startCDATA: function () {
		//used in characters() methods
		this.cdata = true;
	},
	endCDATA: function () {
		this.cdata = false;
	},

	startDTD: function (name, publicId, systemId, internalSubset) {
		var impl = this.doc.implementation;
		if (impl && impl.createDocumentType) {
			var dt = impl.createDocumentType(name, publicId, systemId, internalSubset);
			this.locator && position(this.locator, dt);
			appendElement(this, dt);
			this.doc.doctype = dt;
		}
	},
	reportError: function (level, message) {
		if (typeof this.onError === 'function') {
			try {
				this.onError(level, message, this);
			} catch (e) {
				throw new ParseError('Reporting ' + level + ' "' + message + '" caused ' + e, this.locator);
			}
		} else {
			console.error('[xmldom ' + level + ']\t' + message, _locator(this.locator));
		}
	},
	/**
	 * @see http://www.saxproject.org/apidoc/org/xml/sax/ErrorHandler.html
	 */
	warning: function (message) {
		this.reportError('warning', message);
	},
	error: function (message) {
		this.reportError('error', message);
	},
	/**
	 * This function reports a fatal error and throws a ParseError.
	 *
	 * @param {string} message
	 * - The message to be used for reporting and throwing the error.
	 * @returns {never}
	 * This function always throws an error and never returns a value.
	 * @throws {ParseError}
	 * Always throws a ParseError with the provided message.
	 */
	fatalError: function (message) {
		this.reportError('fatalError', message);
		throw new ParseError(message, this.locator);
	},
};

function _locator(l) {
	if (l) {
		return '\n@#[line:' + l.lineNumber + ',col:' + l.columnNumber + ']';
	}
}

function _toString(chars, start, length) {
	if (typeof chars == 'string') {
		return chars.substr(start, length);
	} else {
		//java sax connect width xmldom on rhino(what about: "? && !(chars instanceof String)")
		if (chars.length >= start + length || start) {
			return new java.lang.String(chars, start, length) + '';
		}
		return chars;
	}
}

/*
 * @link http://www.saxproject.org/apidoc/org/xml/sax/ext/LexicalHandler.html
 * used method of org.xml.sax.ext.LexicalHandler:
 * #comment(chars, start, length)
 * #startCDATA()
 * #endCDATA()
 * #startDTD(name, publicId, systemId)
 *
 * IGNORED method of org.xml.sax.ext.LexicalHandler:
 * #endDTD()
 * #startEntity(name)
 * #endEntity(name)
 *
 * @link http://www.saxproject.org/apidoc/org/xml/sax/ext/DeclHandler.html
 * IGNORED method of org.xml.sax.ext.DeclHandler
 * #attributeDecl(eName, aName, type, mode, value)
 * #elementDecl(name, model)
 * #externalEntityDecl(name, publicId, systemId)
 * #internalEntityDecl(name, value)
 * @link http://www.saxproject.org/apidoc/org/xml/sax/ext/EntityResolver2.html
 * IGNORED method of org.xml.sax.EntityResolver2
 * #resolveEntity(String name,String publicId,String baseURI,String systemId)
 * #resolveEntity(publicId, systemId)
 * #getExternalSubset(name, baseURI)
 * @link http://www.saxproject.org/apidoc/org/xml/sax/DTDHandler.html
 * IGNORED method of org.xml.sax.DTDHandler
 * #notationDecl(name, publicId, systemId) {};
 * #unparsedEntityDecl(name, publicId, systemId, notationName) {};
 */
'endDTD,startEntity,endEntity,attributeDecl,elementDecl,externalEntityDecl,internalEntityDecl,resolveEntity,getExternalSubset,notationDecl,unparsedEntityDecl'.replace(
	/\w+/g,
	function (key) {
		DOMHandler.prototype[key] = function () {
			return null;
		};
	}
);

/* Private static helpers treated below as private instance methods, so don't need to add these to the public API; we might use a Relator to also get rid of non-standard public properties */
function appendElement(handler, node) {
	if (!handler.currentElement) {
		handler.doc.appendChild(node);
	} else {
		handler.currentElement.appendChild(node);
	}
}

/**
 * A method that prevents any further parsing when an `error`
 * with level `error` is reported during parsing.
 *
 * @see {@link DOMParserOptions.onError}
 * @see {@link onWarningStopParsing}
 */
function onErrorStopParsing(level) {
	if (level === 'error') throw 'onErrorStopParsing';
}

/**
 * A method that prevents any further parsing when any `error` is reported during parsing.
 *
 * @see {@link DOMParserOptions.onError}
 * @see {@link onErrorStopParsing}
 */
function onWarningStopParsing() {
	throw 'onWarningStopParsing';
}

exports.__DOMHandler = DOMHandler;
exports.DOMParser = DOMParser;
exports.normalizeLineEndings = normalizeLineEndings;
exports.onErrorStopParsing = onErrorStopParsing;
exports.onWarningStopParsing = onWarningStopParsing;
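The exports above (`DOMParser`, `onErrorStopParsing`, `onWarningStopParsing`) are the error-handling surface this vendored copy of `@xmldom/xmldom` exposes. A minimal usage sketch, assuming the package resolves through a normal `require()` and using an illustrative XML string; this is not code from the commit itself:

```javascript
// Minimal sketch: stop parsing on the first reported error.
const { DOMParser, onErrorStopParsing } = require('@xmldom/xmldom');

const parser = new DOMParser({ onError: onErrorStopParsing });

try {
  const doc = parser.parseFromString('<root><item id="1"/></root>', 'text/xml');
  console.log(doc.documentElement.tagName); // "root"
} catch (e) {
  // onErrorStopParsing turns reported errors into a thrown value,
  // so malformed input surfaces here instead of producing a partial DOM.
  console.error('XML rejected:', e);
}
```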
3135 tradeCattle/aiotagro-cattle-trade/node_modules/@xmldom/xmldom/lib/dom.js generated vendored Normal file
File diff suppressed because it is too large
2171 tradeCattle/aiotagro-cattle-trade/node_modules/@xmldom/xmldom/lib/entities.js generated vendored Normal file
File diff suppressed because it is too large
202 tradeCattle/aiotagro-cattle-trade/node_modules/@xmldom/xmldom/lib/errors.js generated vendored Normal file
533 tradeCattle/aiotagro-cattle-trade/node_modules/@xmldom/xmldom/lib/grammar.js generated vendored Normal file
41 tradeCattle/aiotagro-cattle-trade/node_modules/@xmldom/xmldom/lib/index.js generated vendored Normal file
@@ -0,0 +1,41 @@
'use strict';
var conventions = require('./conventions');
exports.assign = conventions.assign;
exports.hasDefaultHTMLNamespace = conventions.hasDefaultHTMLNamespace;
exports.isHTMLMimeType = conventions.isHTMLMimeType;
exports.isValidMimeType = conventions.isValidMimeType;
exports.MIME_TYPE = conventions.MIME_TYPE;
exports.NAMESPACE = conventions.NAMESPACE;

var errors = require('./errors');
exports.DOMException = errors.DOMException;
exports.DOMExceptionName = errors.DOMExceptionName;
exports.ExceptionCode = errors.ExceptionCode;
exports.ParseError = errors.ParseError;

var dom = require('./dom');
exports.Attr = dom.Attr;
exports.CDATASection = dom.CDATASection;
exports.CharacterData = dom.CharacterData;
exports.Comment = dom.Comment;
exports.Document = dom.Document;
exports.DocumentFragment = dom.DocumentFragment;
exports.DocumentType = dom.DocumentType;
exports.DOMImplementation = dom.DOMImplementation;
exports.Element = dom.Element;
exports.Entity = dom.Entity;
exports.EntityReference = dom.EntityReference;
exports.LiveNodeList = dom.LiveNodeList;
exports.NamedNodeMap = dom.NamedNodeMap;
exports.Node = dom.Node;
exports.NodeList = dom.NodeList;
exports.Notation = dom.Notation;
exports.ProcessingInstruction = dom.ProcessingInstruction;
exports.Text = dom.Text;
exports.XMLSerializer = dom.XMLSerializer;

var domParser = require('./dom-parser');
exports.DOMParser = domParser.DOMParser;
exports.normalizeLineEndings = domParser.normalizeLineEndings;
exports.onErrorStopParsing = domParser.onErrorStopParsing;
exports.onWarningStopParsing = domParser.onWarningStopParsing;
929 tradeCattle/aiotagro-cattle-trade/node_modules/@xmldom/xmldom/lib/sax.js generated vendored Normal file
|
||||||
|
case S_ATTR_END:
|
||||||
|
s = S_TAG_SPACE;
|
||||||
|
break;
|
||||||
|
//case S_TAG_SPACE:
|
||||||
|
//case S_EQ:
|
||||||
|
//case S_ATTR_SPACE:
|
||||||
|
// void();break;
|
||||||
|
//case S_TAG_CLOSE:
|
||||||
|
//ignore warning
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
//not space
|
||||||
|
//S_TAG, S_ATTR, S_EQ, S_ATTR_NOQUOT_VALUE
|
||||||
|
//S_ATTR_SPACE, S_ATTR_END, S_TAG_SPACE, S_TAG_CLOSE
|
||||||
|
switch (s) {
|
||||||
|
//case S_TAG:void();break;
|
||||||
|
//case S_ATTR:void();break;
|
||||||
|
//case S_ATTR_NOQUOT_VALUE:void();break;
|
||||||
|
case S_ATTR_SPACE:
|
||||||
|
if (!isHTML) {
|
||||||
|
errorHandler.warning('attribute "' + attrName + '" missed value!! "' + attrName + '" instead2!!');
|
||||||
|
}
|
||||||
|
addAttribute(attrName, attrName, start);
|
||||||
|
start = p;
|
||||||
|
s = S_ATTR;
|
||||||
|
break;
|
||||||
|
case S_ATTR_END:
|
||||||
|
errorHandler.warning('attribute space is required"' + attrName + '"!!');
|
||||||
|
case S_TAG_SPACE:
|
||||||
|
s = S_ATTR;
|
||||||
|
start = p;
|
||||||
|
break;
|
||||||
|
case S_EQ:
|
||||||
|
s = S_ATTR_NOQUOT_VALUE;
|
||||||
|
start = p;
|
||||||
|
break;
|
||||||
|
case S_TAG_CLOSE:
|
||||||
|
throw new Error("elements closed character '/' and '>' must be connected to");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} //end outer switch
|
||||||
|
p++;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @returns
|
||||||
|
* `true` if a new namespace has been defined.
|
||||||
|
*/
|
||||||
|
function appendElement(el, domBuilder, currentNSMap) {
|
||||||
|
var tagName = el.tagName;
|
||||||
|
var localNSMap = null;
|
||||||
|
var i = el.length;
|
||||||
|
while (i--) {
|
||||||
|
var a = el[i];
|
||||||
|
var qName = a.qName;
|
||||||
|
var value = a.value;
|
||||||
|
var nsp = qName.indexOf(':');
|
||||||
|
if (nsp > 0) {
|
||||||
|
var prefix = (a.prefix = qName.slice(0, nsp));
|
||||||
|
var localName = qName.slice(nsp + 1);
|
||||||
|
var nsPrefix = prefix === 'xmlns' && localName;
|
||||||
|
} else {
|
||||||
|
localName = qName;
|
||||||
|
prefix = null;
|
||||||
|
nsPrefix = qName === 'xmlns' && '';
|
||||||
|
}
|
||||||
|
//can not set prefix,because prefix !== ''
|
||||||
|
a.localName = localName;
|
||||||
|
//prefix == null for no ns prefix attribute
|
||||||
|
if (nsPrefix !== false) {
|
||||||
|
//hack!!
|
||||||
|
if (localNSMap == null) {
|
||||||
|
localNSMap = Object.create(null);
|
||||||
|
_copy(currentNSMap, (currentNSMap = Object.create(null)));
|
||||||
|
}
|
||||||
|
currentNSMap[nsPrefix] = localNSMap[nsPrefix] = value;
|
||||||
|
a.uri = NAMESPACE.XMLNS;
|
||||||
|
domBuilder.startPrefixMapping(nsPrefix, value);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
var i = el.length;
|
||||||
|
while (i--) {
|
||||||
|
a = el[i];
|
||||||
|
if (a.prefix) {
|
||||||
|
//no prefix attribute has no namespace
|
||||||
|
if (a.prefix === 'xml') {
|
||||||
|
a.uri = NAMESPACE.XML;
|
||||||
|
}
|
||||||
|
if (a.prefix !== 'xmlns') {
|
||||||
|
a.uri = currentNSMap[a.prefix];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
var nsp = tagName.indexOf(':');
|
||||||
|
if (nsp > 0) {
|
||||||
|
prefix = el.prefix = tagName.slice(0, nsp);
|
||||||
|
localName = el.localName = tagName.slice(nsp + 1);
|
||||||
|
} else {
|
||||||
|
prefix = null; //important!!
|
||||||
|
localName = el.localName = tagName;
|
||||||
|
}
|
||||||
|
//no prefix element has default namespace
|
||||||
|
var ns = (el.uri = currentNSMap[prefix || '']);
|
||||||
|
domBuilder.startElement(ns, localName, tagName, el);
|
||||||
|
//endPrefixMapping and startPrefixMapping have not any help for dom builder
|
||||||
|
//localNSMap = null
|
||||||
|
if (el.closed) {
|
||||||
|
domBuilder.endElement(ns, localName, tagName);
|
||||||
|
if (localNSMap) {
|
||||||
|
for (prefix in localNSMap) {
|
||||||
|
if (hasOwn(localNSMap, prefix)) {
|
||||||
|
domBuilder.endPrefixMapping(prefix);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
el.currentNSMap = currentNSMap;
|
||||||
|
el.localNSMap = localNSMap;
|
||||||
|
//parseStack.push(el);
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function parseHtmlSpecialContent(source, elStartEnd, tagName, entityReplacer, domBuilder) {
|
||||||
|
// https://html.spec.whatwg.org/#raw-text-elements
|
||||||
|
// https://html.spec.whatwg.org/#escapable-raw-text-elements
|
||||||
|
// https://html.spec.whatwg.org/#cdata-rcdata-restrictions:raw-text-elements
|
||||||
|
// TODO: https://html.spec.whatwg.org/#cdata-rcdata-restrictions
|
||||||
|
var isEscapableRaw = isHTMLEscapableRawTextElement(tagName);
|
||||||
|
if (isEscapableRaw || isHTMLRawTextElement(tagName)) {
|
||||||
|
var elEndStart = source.indexOf('</' + tagName + '>', elStartEnd);
|
||||||
|
var text = source.substring(elStartEnd + 1, elEndStart);
|
||||||
|
|
||||||
|
if (isEscapableRaw) {
|
||||||
|
text = text.replace(ENTITY_REG, entityReplacer);
|
||||||
|
}
|
||||||
|
domBuilder.characters(text, 0, text.length);
|
||||||
|
return elEndStart;
|
||||||
|
}
|
||||||
|
return elStartEnd + 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
function _copy(source, target) {
|
||||||
|
for (var n in source) {
|
||||||
|
if (hasOwn(source, n)) {
|
||||||
|
target[n] = source[n];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @typedef ParseUtils
|
||||||
|
* @property {function(relativeIndex: number?): string | undefined} char
|
||||||
|
* Provides look ahead access to a singe character relative to the current index.
|
||||||
|
* @property {function(): number} getIndex
|
||||||
|
* Provides read-only access to the current index.
|
||||||
|
* @property {function(reg: RegExp): string | null} getMatch
|
||||||
|
* Applies the provided regular expression enforcing that it starts at the current index and
|
||||||
|
* returns the complete matching string,
|
||||||
|
* and moves the current index by the length of the matching string.
|
||||||
|
* @property {function(): string} getSource
|
||||||
|
* Provides read-only access to the complete source.
|
||||||
|
* @property {function(places: number?): void} skip
|
||||||
|
* moves the current index by places (defaults to 1)
|
||||||
|
* @property {function(): number} skipBlanks
|
||||||
|
* Moves the current index by the amount of white space that directly follows the current index
|
||||||
|
* and returns the amount of whitespace chars skipped (0..n),
|
||||||
|
* or -1 if the end of the source was reached.
|
||||||
|
* @property {function(): string} substringFromIndex
|
||||||
|
* creates a substring from the current index to the end of `source`
|
||||||
|
* @property {function(compareWith: string): boolean} substringStartsWith
|
||||||
|
* Checks if `source` contains `compareWith`, starting from the current index.
|
||||||
|
* @property {function(compareWith: string): boolean} substringStartsWithCaseInsensitive
|
||||||
|
* Checks if `source` contains `compareWith`, starting from the current index,
|
||||||
|
* comparing the upper case of both sides.
|
||||||
|
* @see {@link parseUtils}
|
||||||
|
*/
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A temporary scope for parsing and look ahead operations in `source`,
|
||||||
|
* starting from index `start`.
|
||||||
|
*
|
||||||
|
* Some operations move the current index by a number of positions,
|
||||||
|
* after which `getIndex` returns the new index.
|
||||||
|
*
|
||||||
|
* @param {string} source
|
||||||
|
* @param {number} start
|
||||||
|
* @returns {ParseUtils}
|
||||||
|
*/
|
||||||
|
function parseUtils(source, start) {
|
||||||
|
var index = start;
|
||||||
|
|
||||||
|
function char(n) {
|
||||||
|
n = n || 0;
|
||||||
|
return source.charAt(index + n);
|
||||||
|
}
|
||||||
|
|
||||||
|
function skip(n) {
|
||||||
|
n = n || 1;
|
||||||
|
index += n;
|
||||||
|
}
|
||||||
|
|
||||||
|
function skipBlanks() {
|
||||||
|
var blanks = 0;
|
||||||
|
while (index < source.length) {
|
||||||
|
var c = char();
|
||||||
|
if (c !== ' ' && c !== '\n' && c !== '\t' && c !== '\r') {
|
||||||
|
return blanks;
|
||||||
|
}
|
||||||
|
blanks++;
|
||||||
|
skip();
|
||||||
|
}
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
function substringFromIndex() {
|
||||||
|
return source.substring(index);
|
||||||
|
}
|
||||||
|
function substringStartsWith(text) {
|
||||||
|
return source.substring(index, index + text.length) === text;
|
||||||
|
}
|
||||||
|
function substringStartsWithCaseInsensitive(text) {
|
||||||
|
return source.substring(index, index + text.length).toUpperCase() === text.toUpperCase();
|
||||||
|
}
|
||||||
|
|
||||||
|
function getMatch(args) {
|
||||||
|
var expr = g.reg('^', args);
|
||||||
|
var match = expr.exec(substringFromIndex());
|
||||||
|
if (match) {
|
||||||
|
skip(match[0].length);
|
||||||
|
return match[0];
|
||||||
|
}
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
return {
|
||||||
|
char: char,
|
||||||
|
getIndex: function () {
|
||||||
|
return index;
|
||||||
|
},
|
||||||
|
getMatch: getMatch,
|
||||||
|
getSource: function () {
|
||||||
|
return source;
|
||||||
|
},
|
||||||
|
skip: skip,
|
||||||
|
skipBlanks: skipBlanks,
|
||||||
|
substringFromIndex: substringFromIndex,
|
||||||
|
substringStartsWith: substringStartsWith,
|
||||||
|
substringStartsWithCaseInsensitive: substringStartsWithCaseInsensitive,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @param {ParseUtils} p
|
||||||
|
* @param {DOMHandler} errorHandler
|
||||||
|
* @returns {string}
|
||||||
|
*/
|
||||||
|
function parseDoctypeInternalSubset(p, errorHandler) {
|
||||||
|
/**
|
||||||
|
* @param {ParseUtils} p
|
||||||
|
* @param {DOMHandler} errorHandler
|
||||||
|
* @returns {string}
|
||||||
|
*/
|
||||||
|
function parsePI(p, errorHandler) {
|
||||||
|
var match = g.PI.exec(p.substringFromIndex());
|
||||||
|
if (!match) {
|
||||||
|
return errorHandler.fatalError('processing instruction is not well-formed at position ' + p.getIndex());
|
||||||
|
}
|
||||||
|
if (match[1].toLowerCase() === 'xml') {
|
||||||
|
return errorHandler.fatalError(
|
||||||
|
'xml declaration is only allowed at the start of the document, but found at position ' + p.getIndex()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
p.skip(match[0].length);
|
||||||
|
return match[0];
|
||||||
|
}
|
||||||
|
// Parse internal subset
|
||||||
|
var source = p.getSource();
|
||||||
|
if (p.char() === '[') {
|
||||||
|
p.skip(1);
|
||||||
|
var intSubsetStart = p.getIndex();
|
||||||
|
while (p.getIndex() < source.length) {
|
||||||
|
p.skipBlanks();
|
||||||
|
if (p.char() === ']') {
|
||||||
|
var internalSubset = source.substring(intSubsetStart, p.getIndex());
|
||||||
|
p.skip(1);
|
||||||
|
return internalSubset;
|
||||||
|
}
|
||||||
|
var current = null;
|
||||||
|
// Only in external subset
|
||||||
|
// if (char() === '<' && char(1) === '!' && char(2) === '[') {
|
||||||
|
// parseConditionalSections(p, errorHandler);
|
||||||
|
// } else
|
||||||
|
if (p.char() === '<' && p.char(1) === '!') {
|
||||||
|
switch (p.char(2)) {
|
||||||
|
case 'E': // ELEMENT | ENTITY
|
||||||
|
if (p.char(3) === 'L') {
|
||||||
|
current = p.getMatch(g.elementdecl);
|
||||||
|
} else if (p.char(3) === 'N') {
|
||||||
|
current = p.getMatch(g.EntityDecl);
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
case 'A': // ATTRIBUTE
|
||||||
|
current = p.getMatch(g.AttlistDecl);
|
||||||
|
break;
|
||||||
|
case 'N': // NOTATION
|
||||||
|
current = p.getMatch(g.NotationDecl);
|
||||||
|
break;
|
||||||
|
case '-': // COMMENT
|
||||||
|
current = p.getMatch(g.Comment);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
} else if (p.char() === '<' && p.char(1) === '?') {
|
||||||
|
current = parsePI(p, errorHandler);
|
||||||
|
} else if (p.char() === '%') {
|
||||||
|
current = p.getMatch(g.PEReference);
|
||||||
|
} else {
|
||||||
|
return errorHandler.fatalError('Error detected in Markup declaration');
|
||||||
|
}
|
||||||
|
if (!current) {
|
||||||
|
return errorHandler.fatalError('Error in internal subset at position ' + p.getIndex());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return errorHandler.fatalError('doctype internal subset is not well-formed, missing ]');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Called when the parser encounters an element starting with '<!'.
|
||||||
|
*
|
||||||
|
* @param {string} source
|
||||||
|
* The xml.
|
||||||
|
* @param {number} start
|
||||||
|
* the start index of the '<!'
|
||||||
|
* @param {DOMHandler} domBuilder
|
||||||
|
* @param {DOMHandler} errorHandler
|
||||||
|
* @param {boolean} isHTML
|
||||||
|
* @returns {number | never}
|
||||||
|
* The end index of the element.
|
||||||
|
* @throws {ParseError}
|
||||||
|
* In case the element is not well-formed.
|
||||||
|
*/
|
||||||
|
function parseDoctypeCommentOrCData(source, start, domBuilder, errorHandler, isHTML) {
|
||||||
|
var p = parseUtils(source, start);
|
||||||
|
|
||||||
|
switch (isHTML ? p.char(2).toUpperCase() : p.char(2)) {
|
||||||
|
case '-':
|
||||||
|
// should be a comment
|
||||||
|
var comment = p.getMatch(g.Comment);
|
||||||
|
if (comment) {
|
||||||
|
domBuilder.comment(comment, g.COMMENT_START.length, comment.length - g.COMMENT_START.length - g.COMMENT_END.length);
|
||||||
|
return p.getIndex();
|
||||||
|
} else {
|
||||||
|
return errorHandler.fatalError('comment is not well-formed at position ' + p.getIndex());
|
||||||
|
}
|
||||||
|
case '[':
|
||||||
|
// should be CDATA
|
||||||
|
var cdata = p.getMatch(g.CDSect);
|
||||||
|
if (cdata) {
|
||||||
|
if (!isHTML && !domBuilder.currentElement) {
|
||||||
|
return errorHandler.fatalError('CDATA outside of element');
|
||||||
|
}
|
||||||
|
domBuilder.startCDATA();
|
||||||
|
domBuilder.characters(cdata, g.CDATA_START.length, cdata.length - g.CDATA_START.length - g.CDATA_END.length);
|
||||||
|
domBuilder.endCDATA();
|
||||||
|
return p.getIndex();
|
||||||
|
} else {
|
||||||
|
return errorHandler.fatalError('Invalid CDATA starting at position ' + start);
|
||||||
|
}
|
||||||
|
case 'D': {
|
||||||
|
// should be DOCTYPE
|
||||||
|
if (domBuilder.doc && domBuilder.doc.documentElement) {
|
||||||
|
return errorHandler.fatalError('Doctype not allowed inside or after documentElement at position ' + p.getIndex());
|
||||||
|
}
|
||||||
|
if (isHTML ? !p.substringStartsWithCaseInsensitive(g.DOCTYPE_DECL_START) : !p.substringStartsWith(g.DOCTYPE_DECL_START)) {
|
||||||
|
return errorHandler.fatalError('Expected ' + g.DOCTYPE_DECL_START + ' at position ' + p.getIndex());
|
||||||
|
}
|
||||||
|
p.skip(g.DOCTYPE_DECL_START.length);
|
||||||
|
if (p.skipBlanks() < 1) {
|
||||||
|
return errorHandler.fatalError('Expected whitespace after ' + g.DOCTYPE_DECL_START + ' at position ' + p.getIndex());
|
||||||
|
}
|
||||||
|
|
||||||
|
var doctype = {
|
||||||
|
name: undefined,
|
||||||
|
publicId: undefined,
|
||||||
|
systemId: undefined,
|
||||||
|
internalSubset: undefined,
|
||||||
|
};
|
||||||
|
// Parse the DOCTYPE name
|
||||||
|
doctype.name = p.getMatch(g.Name);
|
||||||
|
if (!doctype.name)
|
||||||
|
return errorHandler.fatalError('doctype name missing or contains unexpected characters at position ' + p.getIndex());
|
||||||
|
|
||||||
|
if (isHTML && doctype.name.toLowerCase() !== 'html') {
|
||||||
|
errorHandler.warning('Unexpected DOCTYPE in HTML document at position ' + p.getIndex());
|
||||||
|
}
|
||||||
|
p.skipBlanks();
|
||||||
|
|
||||||
|
// Check for ExternalID
|
||||||
|
if (p.substringStartsWith(g.PUBLIC) || p.substringStartsWith(g.SYSTEM)) {
|
||||||
|
var match = g.ExternalID_match.exec(p.substringFromIndex());
|
||||||
|
if (!match) {
|
||||||
|
return errorHandler.fatalError('doctype external id is not well-formed at position ' + p.getIndex());
|
||||||
|
}
|
||||||
|
if (match.groups.SystemLiteralOnly !== undefined) {
|
||||||
|
doctype.systemId = match.groups.SystemLiteralOnly;
|
||||||
|
} else {
|
||||||
|
doctype.systemId = match.groups.SystemLiteral;
|
||||||
|
doctype.publicId = match.groups.PubidLiteral;
|
||||||
|
}
|
||||||
|
p.skip(match[0].length);
|
||||||
|
} else if (isHTML && p.substringStartsWithCaseInsensitive(g.SYSTEM)) {
|
||||||
|
// https://html.spec.whatwg.org/multipage/syntax.html#doctype-legacy-string
|
||||||
|
p.skip(g.SYSTEM.length);
|
||||||
|
if (p.skipBlanks() < 1) {
|
||||||
|
return errorHandler.fatalError('Expected whitespace after ' + g.SYSTEM + ' at position ' + p.getIndex());
|
||||||
|
}
|
||||||
|
doctype.systemId = p.getMatch(g.ABOUT_LEGACY_COMPAT_SystemLiteral);
|
||||||
|
if (!doctype.systemId) {
|
||||||
|
return errorHandler.fatalError(
|
||||||
|
'Expected ' + g.ABOUT_LEGACY_COMPAT + ' in single or double quotes after ' + g.SYSTEM + ' at position ' + p.getIndex()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (isHTML && doctype.systemId && !g.ABOUT_LEGACY_COMPAT_SystemLiteral.test(doctype.systemId)) {
|
||||||
|
errorHandler.warning('Unexpected doctype.systemId in HTML document at position ' + p.getIndex());
|
||||||
|
}
|
||||||
|
if (!isHTML) {
|
||||||
|
p.skipBlanks();
|
||||||
|
doctype.internalSubset = parseDoctypeInternalSubset(p, errorHandler);
|
||||||
|
}
|
||||||
|
p.skipBlanks();
|
||||||
|
if (p.char() !== '>') {
|
||||||
|
return errorHandler.fatalError('doctype not terminated with > at position ' + p.getIndex());
|
||||||
|
}
|
||||||
|
p.skip(1);
|
||||||
|
domBuilder.startDTD(doctype.name, doctype.publicId, doctype.systemId, doctype.internalSubset);
|
||||||
|
domBuilder.endDTD();
|
||||||
|
return p.getIndex();
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
return errorHandler.fatalError('Not well-formed XML starting with "<!" at position ' + start);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function parseProcessingInstruction(source, start, domBuilder, errorHandler) {
|
||||||
|
var match = source.substring(start).match(g.PI);
|
||||||
|
if (!match) {
|
||||||
|
return errorHandler.fatalError('Invalid processing instruction starting at position ' + start);
|
||||||
|
}
|
||||||
|
if (match[1].toLowerCase() === 'xml') {
|
||||||
|
if (start > 0) {
|
||||||
|
return errorHandler.fatalError(
|
||||||
|
'processing instruction at position ' + start + ' is an xml declaration which is only at the start of the document'
|
||||||
|
);
|
||||||
|
}
|
||||||
|
if (!g.XMLDecl.test(source.substring(start))) {
|
||||||
|
return errorHandler.fatalError('xml declaration is not well-formed');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
domBuilder.processingInstruction(match[1], match[2]);
|
||||||
|
return start + match[0].length;
|
||||||
|
}
|
||||||
|
|
||||||
|
function ElementAttributes() {
|
||||||
|
this.attributeNames = Object.create(null);
|
||||||
|
}
|
||||||
|
|
||||||
|
ElementAttributes.prototype = {
|
||||||
|
setTagName: function (tagName) {
|
||||||
|
if (!g.QName_exact.test(tagName)) {
|
||||||
|
throw new Error('invalid tagName:' + tagName);
|
||||||
|
}
|
||||||
|
this.tagName = tagName;
|
||||||
|
},
|
||||||
|
addValue: function (qName, value, offset) {
|
||||||
|
if (!g.QName_exact.test(qName)) {
|
||||||
|
throw new Error('invalid attribute:' + qName);
|
||||||
|
}
|
||||||
|
this.attributeNames[qName] = this.length;
|
||||||
|
this[this.length++] = { qName: qName, value: value, offset: offset };
|
||||||
|
},
|
||||||
|
length: 0,
|
||||||
|
getLocalName: function (i) {
|
||||||
|
return this[i].localName;
|
||||||
|
},
|
||||||
|
getLocator: function (i) {
|
||||||
|
return this[i].locator;
|
||||||
|
},
|
||||||
|
getQName: function (i) {
|
||||||
|
return this[i].qName;
|
||||||
|
},
|
||||||
|
getURI: function (i) {
|
||||||
|
return this[i].uri;
|
||||||
|
},
|
||||||
|
getValue: function (i) {
|
||||||
|
return this[i].value;
|
||||||
|
},
|
||||||
|
// ,getIndex:function(uri, localName)){
|
||||||
|
// if(localName){
|
||||||
|
//
|
||||||
|
// }else{
|
||||||
|
// var qName = uri
|
||||||
|
// }
|
||||||
|
// },
|
||||||
|
// getValue:function(){return this.getValue(this.getIndex.apply(this,arguments))},
|
||||||
|
// getType:function(uri,localName){}
|
||||||
|
// getType:function(i){},
|
||||||
|
};
|
||||||
|
|
||||||
|
exports.XMLReader = XMLReader;
|
||||||
|
exports.parseUtils = parseUtils;
|
||||||
|
exports.parseDoctypeCommentOrCData = parseDoctypeCommentOrCData;
|
||||||
73 tradeCattle/aiotagro-cattle-trade/node_modules/@xmldom/xmldom/package.json generated vendored Normal file
@@ -0,0 +1,73 @@
{
  "name": "@xmldom/xmldom",
  "version": "0.9.8",
  "description": "A pure JavaScript W3C standard-based (XML DOM Level 2 Core) DOMParser and XMLSerializer module.",
  "keywords": [
    "w3c",
    "dom",
    "xml",
    "parser",
    "javascript",
    "DOMParser",
    "XMLSerializer",
    "ponyfill"
  ],
  "homepage": "https://github.com/xmldom/xmldom",
  "repository": {
    "type": "git",
    "url": "git://github.com/xmldom/xmldom.git"
  },
  "main": "lib/index.js",
  "types": "index.d.ts",
  "files": [
    "CHANGELOG.md",
    "LICENSE",
    "readme.md",
    "SECURITY.md",
    "index.d.ts",
    "lib"
  ],
  "scripts": {
    "lint": "eslint examples lib test",
    "format": "prettier --write examples lib test index.d.ts",
    "changelog": "auto-changelog --unreleased-only",
    "start": "nodemon --watch package.json --watch lib --watch test --exec 'npm --silent run test && npm --silent run lint'",
    "test": "jest",
    "fuzz": "jest --config=./jest.fuzz.config.js",
    "test:types": "cd examples/typescript-node-es6 && ./pretest.sh 3 && ./pretest.sh 4 && ./pretest.sh 5 && node dist/index.js",
    "testrelease": "npm test && eslint lib",
    "version": "./changelog-has-version.sh",
    "release": "np --no-yarn --test-script testrelease"
  },
  "engines": {
    "node": ">=14.6"
  },
  "devDependencies": {
    "@homer0/prettier-plugin-jsdoc": "9.1.0",
    "auto-changelog": "2.5.0",
    "eslint": "8.57.1",
    "eslint-config-prettier": "10.0.1",
    "eslint-plugin-anti-trojan-source": "1.1.1",
    "eslint-plugin-es5": "1.5.0",
    "eslint-plugin-n": "17.15.1",
    "eslint-plugin-prettier": "5.2.3",
    "get-stream": "6.0.1",
    "jest": "29.7.0",
    "nodemon": "3.1.9",
    "np": "8.0.4",
    "prettier": "3.5.2",
    "xmltest": "2.0.3",
    "yauzl": "3.2.0"
  },
  "bugs": {
    "url": "https://github.com/xmldom/xmldom/issues"
  },
  "license": "MIT",
  "auto-changelog": {
    "prepend": true,
    "remote": "origin",
    "tagPrefix": "",
    "template": "./auto-changelog.hbs"
  },
  "packageManager": "npm@11.1.0+sha512.acf301ad9b9ddba948fcb72341e2f0fcae477f56a95cc2a092934d133a7461062633cefbf93d5934a3dc0768674e2edee9f04dcfcc4bb4c327ff0e3a7d552a1b"
}
357 tradeCattle/aiotagro-cattle-trade/node_modules/@xmldom/xmldom/readme.md generated vendored Normal file
@@ -0,0 +1,357 @@
# @xmldom/xmldom

***Since version 0.7.0 this package is published to npm as [`@xmldom/xmldom`](https://www.npmjs.com/package/@xmldom/xmldom) and no longer as [`xmldom`](https://www.npmjs.com/package/xmldom), because [we are no longer able to publish `xmldom`](https://github.com/xmldom/xmldom/issues/271).***
*For better readability in the docs, we will continue to talk about this library as "xmldom".*

[](https://github.com/xmldom/xmldom/blob/master/LICENSE)
[](https://socket.dev/npm/package/@xmldom/xmldom)
[](https://codecov.io/gh/xmldom/xmldom)
[](https://packagephobia.com/result?p=@xmldom/xmldom)

[](https://www.bestpractices.dev/projects/7879)
[](https://securityscorecards.dev/viewer/?uri=github.com/xmldom/xmldom)
[](https://socket.dev/npm/package/@xmldom/xmldom)
[](https://snyk.io/advisor/npm-package/@xmldom/xmldom)

[](https://www.npmjs.com/package/@xmldom/xmldom)
[](https://www.npmjs.com/package/@xmldom/xmldom?activeTab=versions)
[](https://www.npmjs.com/package/@xmldom/xmldom?activeTab=versions)

[](https://github.com/xmldom/xmldom/issues?q=is%3Aissue+is%3Aopen+label%3Abug)
[](https://github.com/xmldom/xmldom/issues?q=is%3Aissue+is%3Aopen+label%3Ahelp-wanted)

xmldom is a javascript [ponyfill](https://ponyfill.com/) to provide the following APIs [that are present in modern browsers](https://caniuse.com/xml-serializer) to other runtimes:

- convert an XML string into a DOM tree
  ```
  new DOMParser().parseFromString(xml, mimeType) => Document
  ```
- create, access and modify a DOM tree
  ```
  new DOMImplementation().createDocument(...) => Document
  ```
- serialize a DOM tree back into an XML string
  ```
  new XMLSerializer().serializeToString(node) => string
  ```

The target runtimes `xmldom` supports are currently Node >= v14.6 (and very likely any other [ES5 compatible runtime](https://compat-table.github.io/compat-table/es5/)).

When deciding how to fix bugs or implement features, `xmldom` tries to stay as close as possible to the various [related specifications/standards](#specs).
As indicated by the version starting with `0.`, this implementation is not feature complete and some implemented features differ from what the specifications describe.
**Issues and PRs for such differences are always welcome, even when they only provide a failing test case.**

This project was forked from its [original source](https://github.com/jindw/xmldom) in 2019, more details about that transition can be found in the [CHANGELOG](CHANGELOG.md#maintainer-changes).

## Usage

### Install:

```
npm install @xmldom/xmldom
```

### Example:

[In NodeJS](examples/nodejs/src/index.js)
```javascript
const { DOMParser, XMLSerializer } = require('@xmldom/xmldom')

const source = `<xml xmlns="a">
  <child>test</child>
  <child/>
</xml>`

const doc = new DOMParser().parseFromString(source, 'text/xml')

const serialized = new XMLSerializer().serializeToString(doc)
```

Note: in Typescript ~~and ES6~~ (see [#316](https://github.com/xmldom/xmldom/issues/316)) you can use the `import` approach, as follows:

```typescript
import { DOMParser } from '@xmldom/xmldom'
```
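To make the round trip above a bit more concrete, here is a minimal sketch (reusing the same `source` string as in the example) that reads data back out of the parsed document, using only APIs listed later in this readme (`getElementsByTagName`, `NodeList.item`, `textContent`):

```javascript
const { DOMParser, XMLSerializer } = require('@xmldom/xmldom')

const source = `<xml xmlns="a">
  <child>test</child>
  <child/>
</xml>`

// parse the string into a Document
const doc = new DOMParser().parseFromString(source, 'text/xml')

// read data back out of the DOM tree
const children = doc.getElementsByTagName('child')
console.log(children.length)              // 2
console.log(children.item(0).textContent) // "test"

// and serialize it again
console.log(new XMLSerializer().serializeToString(doc))
```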
## API Reference

* [DOMParser](https://developer.mozilla.org/en-US/docs/Web/API/DOMParser):

  ```javascript
  parseFromString(xmlsource, mimeType)
  ```
  * **options extension** _by xmldom_ (not DOM standard!!)

    ```javascript
    // the options argument can be used to modify behavior
    // for more details check the documentation on the code or type definition
    new DOMParser(options)
    ```

* [XMLSerializer](https://developer.mozilla.org/en-US/docs/Web/API/XMLSerializer)

  ```javascript
  serializeToString(node)
  ```

### DOM level2 method and attribute:

* [Node](http://www.w3.org/TR/2000/REC-DOM-Level-2-Core-20001113/core.html#ID-1950641247)

  readonly class properties (aka `NodeType`),
  these can be accessed from any `Node` instance `node`:
  `if (node.nodeType === node.ELEMENT_NODE) {...`

  1. `ELEMENT_NODE` (`1`)
  2. `ATTRIBUTE_NODE` (`2`)
  3. `TEXT_NODE` (`3`)
  4. `CDATA_SECTION_NODE` (`4`)
  5. `ENTITY_REFERENCE_NODE` (`5`)
  6. `ENTITY_NODE` (`6`)
  7. `PROCESSING_INSTRUCTION_NODE` (`7`)
  8. `COMMENT_NODE` (`8`)
  9. `DOCUMENT_NODE` (`9`)
  10. `DOCUMENT_TYPE_NODE` (`10`)
  11. `DOCUMENT_FRAGMENT_NODE` (`11`)
  12. `NOTATION_NODE` (`12`)

  attribute:
  - `nodeValue` | `prefix` | `textContent`

  readonly attribute:
  - `nodeName` | `nodeType` | `parentNode` | `parentElement` | `childNodes` | `firstChild` | `lastChild` | `previousSibling` | `nextSibling` | `attributes` | `ownerDocument` | `namespaceURI` | `localName` | `isConnected` | `baseURI`

  method:
  * `insertBefore(newChild, refChild)`
  * `replaceChild(newChild, oldChild)`
  * `removeChild(oldChild)`
  * `appendChild(newChild)`
  * `hasChildNodes()`
  * `cloneNode(deep)`
  * `normalize()`
  * `contains(otherNode)`
  * `getRootNode()`
  * `isEqualNode(otherNode)`
  * `isSameNode(otherNode)`
  * `isSupported(feature, version)`
  * `hasAttributes()`
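As a small illustration of the `NodeType` constants and the read-only attributes listed for `Node` above, the following sketch walks the direct children of a parsed document element and only reports element nodes (the XML snippet itself is just an example input):

```javascript
const { DOMParser } = require('@xmldom/xmldom')

const doc = new DOMParser().parseFromString(
  '<xml xmlns="a">\n  <child>test</child>\n  <child/>\n</xml>',
  'text/xml'
)

const root = doc.documentElement
for (let i = 0; i < root.childNodes.length; i++) {
  const node = root.childNodes.item(i)
  // whitespace between elements is parsed as text nodes; skip those
  if (node.nodeType === node.ELEMENT_NODE) {
    console.log(node.nodeName, node.namespaceURI)
  }
}
```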
* [DOMException](http://www.w3.org/TR/2000/REC-DOM-Level-2-Core-20001113/ecma-script-binding.html)

  extends the Error type thrown as part of DOM API.

  readonly class properties:
  - `INDEX_SIZE_ERR` (`1`)
  - `DOMSTRING_SIZE_ERR` (`2`)
  - `HIERARCHY_REQUEST_ERR` (`3`)
  - `WRONG_DOCUMENT_ERR` (`4`)
  - `INVALID_CHARACTER_ERR` (`5`)
  - `NO_DATA_ALLOWED_ERR` (`6`)
  - `NO_MODIFICATION_ALLOWED_ERR` (`7`)
  - `NOT_FOUND_ERR` (`8`)
  - `NOT_SUPPORTED_ERR` (`9`)
  - `INUSE_ATTRIBUTE_ERR` (`10`)
  - `INVALID_STATE_ERR` (`11`)
  - `SYNTAX_ERR` (`12`)
  - `INVALID_MODIFICATION_ERR` (`13`)
  - `NAMESPACE_ERR` (`14`)
  - `INVALID_ACCESS_ERR` (`15`)

  attributes:
  - `code` with a value matching one of the above constants.

* [DOMImplementation](http://www.w3.org/TR/2000/REC-DOM-Level-2-Core-20001113/core.html#ID-102161490)

  method:
  - `hasFeature(feature, version)` (deprecated)
  - `createDocumentType(qualifiedName, publicId, systemId)`
  - `createDocument(namespaceURI, qualifiedName, doctype)`

* [Document](http://www.w3.org/TR/2000/REC-DOM-Level-2-Core-20001113/core.html#i-Document) : Node

  readonly attribute:
  - `doctype` | `implementation` | `documentElement`

  method:
  - `createElement(tagName)`
  - `createDocumentFragment()`
  - `createTextNode(data)`
  - `createComment(data)`
  - `createCDATASection(data)`
  - `createProcessingInstruction(target, data)`
  - `createAttribute(name)`
  - `createEntityReference(name)`
  - `getElementsByTagName(tagname)`
  - `importNode(importedNode, deep)`
  - `createElementNS(namespaceURI, qualifiedName)`
  - `createAttributeNS(namespaceURI, qualifiedName)`
  - `getElementsByTagNameNS(namespaceURI, localName)`
  - `getElementById(elementId)`
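To illustrate the `DOMImplementation` and `Document` methods listed above, here is a minimal sketch that builds a small document from scratch and serializes it. Only methods from this reference are used; the namespace, element and attribute names are made up for the example:

```javascript
const { DOMImplementation, XMLSerializer } = require('@xmldom/xmldom')

// create an empty document with a root element <xml> in the namespace "a"
const doc = new DOMImplementation().createDocument('a', 'xml', null)

// build <child attr="value">test</child> in the same namespace and attach it
const child = doc.createElementNS('a', 'child')
child.setAttribute('attr', 'value')
child.appendChild(doc.createTextNode('test'))
doc.documentElement.appendChild(child)

console.log(new XMLSerializer().serializeToString(doc))
// e.g. <xml xmlns="a"><child attr="value">test</child></xml>
```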
* [DocumentFragment](http://www.w3.org/TR/2000/REC-DOM-Level-2-Core-20001113/core.html#ID-B63ED1A3) : Node
* [Element](http://www.w3.org/TR/2000/REC-DOM-Level-2-Core-20001113/core.html#ID-745549614) : Node

  readonly attribute:
  - `tagName`

  method:
  - `getAttribute(name)`
  - `setAttribute(name, value)`
  - `removeAttribute(name)`
  - `getAttributeNode(name)`
  - `setAttributeNode(newAttr)`
  - `removeAttributeNode(oldAttr)`
  - `getElementsByTagName(name)`
  - `getAttributeNS(namespaceURI, localName)`
  - `setAttributeNS(namespaceURI, qualifiedName, value)`
  - `removeAttributeNS(namespaceURI, localName)`
  - `getAttributeNodeNS(namespaceURI, localName)`
  - `setAttributeNodeNS(newAttr)`
  - `getElementsByTagNameNS(namespaceURI, localName)`
  - `hasAttribute(name)`
  - `hasAttributeNS(namespaceURI, localName)`

* [Attr](http://www.w3.org/TR/2000/REC-DOM-Level-2-Core-20001113/core.html#ID-637646024) : Node

  attribute:
  - `value`

  readonly attribute:
  - `name` | `specified` | `ownerElement`

* [NodeList](http://www.w3.org/TR/2000/REC-DOM-Level-2-Core-20001113/core.html#ID-536297177)

  readonly attribute:
  - `length`

  method:
  - `item(index)`

* [NamedNodeMap](http://www.w3.org/TR/2000/REC-DOM-Level-2-Core-20001113/core.html#ID-1780488922)

  readonly attribute:
  - `length`

  method:
  - `getNamedItem(name)`
  - `setNamedItem(arg)`
  - `removeNamedItem(name)`
  - `item(index)`
  - `getNamedItemNS(namespaceURI, localName)`
  - `setNamedItemNS(arg)`
  - `removeNamedItemNS(namespaceURI, localName)`

* [CharacterData](http://www.w3.org/TR/2000/REC-DOM-Level-2-Core-20001113/core.html#ID-FF21A306) : Node

  method:
  - `substringData(offset, count)`
  - `appendData(arg)`
  - `insertData(offset, arg)`
  - `deleteData(offset, count)`
  - `replaceData(offset, count, arg)`

* [Text](http://www.w3.org/TR/2000/REC-DOM-Level-2-Core-20001113/core.html#ID-1312295772) : CharacterData

  method:
  - `splitText(offset)`

* [CDATASection](http://www.w3.org/TR/2000/REC-DOM-Level-2-Core-20001113/core.html#ID-667469212)
* [Comment](http://www.w3.org/TR/2000/REC-DOM-Level-2-Core-20001113/core.html#ID-1728279322) : CharacterData

* [DocumentType](http://www.w3.org/TR/2000/REC-DOM-Level-2-Core-20001113/core.html#ID-412266927)

  readonly attribute:
  - `name` | `entities` | `notations` | `publicId` | `systemId` | `internalSubset`

* Notation : Node

  readonly attribute:
  - `publicId` | `systemId`

* Entity : Node

  readonly attribute:
  - `publicId` | `systemId` | `notationName`

* EntityReference : Node
* ProcessingInstruction : Node

  attribute:
  - `data`

  readonly attribute:
  - `target`

### DOM level 3 support:

* [Node](http://www.w3.org/TR/DOM-Level-3-Core/core.html#Node3-textContent)

  attribute:
  - `textContent`

  method:
  - `isDefaultNamespace(namespaceURI)`
  - `lookupNamespaceURI(prefix)`

### DOM extension by xmldom

* [Node] Source position extension;

  attribute:
  - `lineNumber` //number starting from `1`
  - `columnNumber` //number starting from `1`
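A short sketch of the xmldom-specific source position extension described above. It assumes that the parser records positions for parsed nodes so that `lineNumber`/`columnNumber` are populated; the exact values depend on where the start tag sits in the source string:

```javascript
const { DOMParser } = require('@xmldom/xmldom')

const doc = new DOMParser().parseFromString(
  '<xml>\n  <child>test</child>\n</xml>',
  'text/xml'
)

const child = doc.getElementsByTagName('child').item(0)
// xmldom extension: 1-based line/column of the node in the source string
console.log(child.lineNumber, child.columnNumber) // e.g. 2 3 for this source
```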
## Specs

The implementation is based on several specifications:

<!-- Should open in new tab and the links in the SVG should be clickable there! -->
<a href="https://raw.githubusercontent.com/xmldom/xmldom/master/docs/specs.svg" target="_blank" rel="noopener noreferrer nofollow">Overview of the related specifications (SVG)</a>

### DOM Parsing and Serialization

From the [W3C DOM Parsing and Serialization (WD 2016)](https://www.w3.org/TR/2016/WD-DOM-Parsing-20160517/) `xmldom` provides an implementation for the interfaces:
- `DOMParser`
- `XMLSerializer`

Note that there are some known deviations between this implementation and the W3 specifications.

Note: [The latest version of this spec](https://w3c.github.io/DOM-Parsing/) has the status "Editors Draft", since it is under active development. One major change is that [the definition of the `DOMParser` interface has been moved to the HTML spec](https://w3c.github.io/DOM-Parsing/#the-domparser-interface).

### DOM

The original author claims that xmldom implements [DOM Level 2] in a "fully compatible" way and some parts of [DOM Level 3], but there are not enough tests to prove this. Both specifications are now superseded by the [DOM Level 4 aka Living standard], which has a much broader scope than xmldom.
In the past, there have been multiple (even breaking) changes to align xmldom with the living standard,
so if you find a difference that is not documented, any contribution to resolve the difference is very welcome (even just reporting it as an issue).

xmldom implements the following interfaces:
- `Attr`
- `CDATASection`
- `CharacterData`
- `Comment`
- `Document`
- `DocumentFragment`
- `DocumentType`
- `DOMException`
- `DOMImplementation`
- `Element`
- `Entity`
- `EntityReference`
- `LiveNodeList`
- `NamedNodeMap`
- `Node`
- `NodeList`
- `Notation`
- `ProcessingInstruction`
- `Text`

More details are available in the (incomplete) [API Reference](#api-reference) section.

### HTML

xmldom does not have any goal of supporting the full spec, but it has some capability to parse, report and serialize things differently when it is told to parse HTML (by passing the HTML namespace).

### SAX, XML, XMLNS

xmldom has its own SAX parser implementation to do the actual parsing, which implements some interfaces in alignment with the Java interfaces SAX defines:
- `XMLReader`
- `DOMHandler`

There is an idea/proposal to make it possible to replace it with something else in <https://github.com/xmldom/xmldom/issues/55>
19 tradeCattle/aiotagro-cattle-trade/node_modules/core-util-is/LICENSE generated vendored Normal file
@@ -0,0 +1,19 @@
Copyright Node.js contributors. All rights reserved.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to
deal in the Software without restriction, including without limitation the
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
IN THE SOFTWARE.
3 tradeCattle/aiotagro-cattle-trade/node_modules/core-util-is/README.md generated vendored Normal file
@@ -0,0 +1,3 @@
# core-util-is

The `util.is*` functions introduced in Node v0.12.
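A minimal usage sketch for the type checks this package re-exports (every function name below appears in `lib/util.js` further down in this diff):

```javascript
const util = require('core-util-is')

console.log(util.isString('hello'))        // true
console.log(util.isNumber(42))             // true
console.log(util.isDate(new Date()))       // true
console.log(util.isNullOrUndefined(null))  // true
console.log(util.isPrimitive({}))          // false
```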
107 tradeCattle/aiotagro-cattle-trade/node_modules/core-util-is/lib/util.js generated vendored Normal file
@@ -0,0 +1,107 @@
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.

// NOTE: These type checking functions intentionally don't use `instanceof`
// because it is fragile and can be easily faked with `Object.create()`.

function isArray(arg) {
  if (Array.isArray) {
    return Array.isArray(arg);
  }
  return objectToString(arg) === '[object Array]';
}
exports.isArray = isArray;

function isBoolean(arg) {
  return typeof arg === 'boolean';
}
exports.isBoolean = isBoolean;

function isNull(arg) {
  return arg === null;
}
exports.isNull = isNull;

function isNullOrUndefined(arg) {
  return arg == null;
}
exports.isNullOrUndefined = isNullOrUndefined;

function isNumber(arg) {
  return typeof arg === 'number';
}
exports.isNumber = isNumber;

function isString(arg) {
  return typeof arg === 'string';
}
exports.isString = isString;

function isSymbol(arg) {
  return typeof arg === 'symbol';
}
exports.isSymbol = isSymbol;

function isUndefined(arg) {
  return arg === void 0;
}
exports.isUndefined = isUndefined;

function isRegExp(re) {
  return objectToString(re) === '[object RegExp]';
}
exports.isRegExp = isRegExp;

function isObject(arg) {
  return typeof arg === 'object' && arg !== null;
}
exports.isObject = isObject;

function isDate(d) {
  return objectToString(d) === '[object Date]';
}
exports.isDate = isDate;

function isError(e) {
  return (objectToString(e) === '[object Error]' || e instanceof Error);
}
exports.isError = isError;

function isFunction(arg) {
  return typeof arg === 'function';
}
exports.isFunction = isFunction;

function isPrimitive(arg) {
  return arg === null ||
         typeof arg === 'boolean' ||
         typeof arg === 'number' ||
         typeof arg === 'string' ||
         typeof arg === 'symbol' ||  // ES6 symbol
         typeof arg === 'undefined';
}
exports.isPrimitive = isPrimitive;

exports.isBuffer = require('buffer').Buffer.isBuffer;

function objectToString(o) {
  return Object.prototype.toString.call(o);
}
38 tradeCattle/aiotagro-cattle-trade/node_modules/core-util-is/package.json generated vendored Normal file
@@ -0,0 +1,38 @@
{
  "name": "core-util-is",
  "version": "1.0.3",
  "description": "The `util.is*` functions introduced in Node v0.12.",
  "main": "lib/util.js",
  "files": [
    "lib"
  ],
  "repository": {
    "type": "git",
    "url": "git://github.com/isaacs/core-util-is"
  },
  "keywords": [
    "util",
    "isBuffer",
    "isArray",
    "isNumber",
    "isString",
    "isRegExp",
    "isThis",
    "isThat",
    "polyfill"
  ],
  "author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)",
  "license": "MIT",
  "bugs": {
    "url": "https://github.com/isaacs/core-util-is/issues"
  },
  "scripts": {
    "test": "tap test.js",
    "preversion": "npm test",
    "postversion": "npm publish",
    "prepublishOnly": "git push origin --follow-tags"
  },
  "devDependencies": {
    "tap": "^15.0.9"
  }
}
1 tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/.ignore generated vendored Normal file
@@ -0,0 +1 @@
vendor
3355 tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/CHANGELOG.md generated vendored Normal file
File diff suppressed because it is too large
651 tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/LICENSE.md generated vendored Normal file
@@ -0,0 +1,651 @@
|
|||||||
|
docxtemplater is dual licensed. You may use it under the MIT license *or* the GPLv3
|
||||||
|
license.
|
||||||
|
|
||||||
|
The MIT License
|
||||||
|
===============
|
||||||
|
|
||||||
|
Copyright (c) Edgar HIPP
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
THE SOFTWARE.
|
||||||
|
|
||||||
|
|
||||||
|
GPL version 3
|
||||||
|
=============
|
||||||
|
|
||||||
|
GNU GENERAL PUBLIC LICENSE
|
||||||
|
Version 3, 29 June 2007
|
||||||
|
|
||||||
|
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
|
||||||
|
Everyone is permitted to copy and distribute verbatim copies
|
||||||
|
of this license document, but changing it is not allowed.
|
||||||
|
|
||||||
|
Preamble
|
||||||
|
|
||||||
|
The GNU General Public License is a free, copyleft license for
|
||||||
|
software and other kinds of works.
|
||||||
|
|
||||||
|
The licenses for most software and other practical works are designed
|
||||||
|
to take away your freedom to share and change the works. By contrast,
|
||||||
|
the GNU General Public License is intended to guarantee your freedom to
|
||||||
|
share and change all versions of a program--to make sure it remains free
software for all its users. We, the Free Software Foundation, use the
GNU General Public License for most of our software; it applies also to
any other work released this way by its authors. You can apply it to
your programs, too.

[Remainder of the vendored license file: the unmodified text of the GNU General Public License, version 3, that is, the rest of the Preamble followed by the complete TERMS AND CONDITIONS, sections 0 (Definitions) through 17 (Interpretation of Sections 15 and 16), ending with END OF TERMS AND CONDITIONS.]
54  tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/README.md  (generated, vendored, normal file)
@@ -0,0 +1,54 @@

# docxtemplater

[](https://www.npmjs.org/package/docxtemplater) [](https://www.npmjs.org/package/docxtemplater) [](https://cdnjs.com/libraries/docxtemplater) [](https://raw.githubusercontent.com/open-xml-templating/docxtemplater-build/master/build/docxtemplater-latest.min.js) [](https://raw.githubusercontent.com/open-xml-templating/docxtemplater-build/master/build/docxtemplater-latest.min.js)

**docxtemplater** is a library to generate docx/pptx documents from a docx/pptx template. It can replace {placeholders} with data and also supports loops and conditions. The templates can be edited by non-programmers, for example your clients.

**docxtemplater** is very robust because of the many fixed issues over the years, and the high quality of tests and code.

## Features

[Demo Site](https://docxtemplater.com/demo)

- <a href="https://docxtemplater.com/demo#simple">Replace a {placeholder} by a value</a>
- <a href="https://docxtemplater.com/demo#loops">Use loops: {#users} {name} {/users}</a>
- <a href="https://docxtemplater.com/demo#loop-table">Use loops in tables to generate columns</a>
- <a href="https://docxtemplater.com/demo#conditions">Use conditions (if users.length>3) with angular Parsing</a>
- <a href="https://docxtemplater.com/demo#xml-insertion">Insert custom XML {@rawXml} (for formatted text for example)</a>

## Quickstart

- [Get started with docxtemplater on nodejs](https://docxtemplater.com/docs/get-started-node)
- [Get started with docxtemplater in the browser (react, angular, vue, nextjs)](https://docxtemplater.com/docs/get-started-browser)

## Documentation

The full documentation of the latest version can be found [here](https://docxtemplater.com/docs).

See [CHANGELOG.md](CHANGELOG.md) for information about how to migrate from older versions.

## Modules

Functionality can be added with the following paid modules:

- [Image module](https://docxtemplater.com/modules/image/) to add a given image with the syntax: `{%image}`;
- [Html Module](https://docxtemplater.com/modules/html/) to insert formatted text in a docx document with the syntax `{~html}`;
- [XLSX Module](https://docxtemplater.com/modules/xlsx) to be able to do templating on Excel files (xlsx extension), also with loops and conditions;
- [Chart Module](https://docxtemplater.com/modules/chart/) to replace a chart by using data from the JSON object that you give with the syntax `{$chart}`;
- [Html-Pptx Module](https://docxtemplater.com/modules/html-pptx/) to insert formatted text in a pptx document with the syntax `{~html}`;
- [Error Location Module](https://docxtemplater.com/modules/error-location) to show the errors in the template using Word comments;
- [Slides Module](https://docxtemplater.com/modules/slides/) to create multiple slides dynamically with the syntax `{:users}`;
- [Subtemplate Module](https://docxtemplater.com/modules/subtemplate) to include an external docx file inside a given docx file with the syntax `{:include doc}`;
- [Subsection Module](https://docxtemplater.com/modules/subsection) to include subsections (headers/footers) from another document with the syntax `{:subsection doc}`;
- [Subtemplate-pptx Module](https://docxtemplater.com/modules/pptx-sub/) to include an external pptx file inside a given pptx file with the syntax `{:include doc}`;
- [Word-Run Module](https://docxtemplater.com/modules/word-run) to include raw runs (`<w:r>`) inside the document with the syntax `{r@wrun}`. This makes it possible to include styled text without having to remove the enclosing paragraph like in the `{@rawXml}` tag;
- [QrCode Module](https://docxtemplater.com/modules/qrcode) to replace an image, keeping any existing properties;
- [Table Module](https://docxtemplater.com/modules/table) to create tables from two-dimensional data using the syntax `{:table data}`;
- [Meta Module](https://docxtemplater.com/modules/meta) to make a document readonly, add a text watermark or update the margins;
- [Styling Module](https://docxtemplater.com/modules/styling) to restyle a paragraph, a cell or a table depending on some data using the syntax `{:stylepar style}`;
- [Footnotes Module](https://docxtemplater.com/modules/footnotes) to be able to add footnotes to a document using the syntax `{:footnotes foot}`;
- [Paragraph Placeholder Module](https://docxtemplater.com/modules/paragraph-placeholder) to simplify conditions that should show or hide a given paragraph using the syntax `{?tag}`.

## About docxtemplater

Docxtemplater is my main job, and has been maintained for over 8 years. Expect to get great support if you buy any modules, and also good support on the open-source version.
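The Quickstart links above boil down to a few lines of Node.js. Below is a minimal sketch, not taken from this repository, using only calls declared in the vendored `docxtemplater.d.ts` further down (`render`, `toBuffer`) together with `pizzip`; the file names `template.docx` / `output.docx` and the data object are placeholders.

```javascript
const fs = require("fs");
const PizZip = require("pizzip");
const Docxtemplater = require("docxtemplater");

// Read the .docx template (a zip archive) and wrap it in PizZip.
const content = fs.readFileSync("template.docx");
const zip = new PizZip(content);

// The constructor compiles the template; options follow DXT.ConstructorOptions.
const doc = new Docxtemplater(zip, { paragraphLoop: true, linebreaks: true });

// Replace {placeholders} and expand {#users}...{/users} loops with this data.
doc.render({ name: "John", users: [{ name: "Jane" }, { name: "Baz" }] });

// Serialize the filled-in document back to a .docx buffer.
fs.writeFileSync("output.docx", doc.toBuffer());
```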
15  tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/SECURITY.md  (generated, vendored, normal file)
@@ -0,0 +1,15 @@

# Security Policy

## Supported Versions

The latest 3.x version of docxtemplater is the one that will be supported.

This version will have security bugfixes until at least 31/12/2025.

## Reporting a Vulnerability

To report a vulnerability, please use the "contact us" button on https://docxtemplater.com/

You should get an update on the reported vulnerability within 48 hours.

The fix will come in quickly, within one week.
5808  tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/build/docxtemplater.js  (generated, vendored, normal file)
File diff suppressed because it is too large

1  tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/build/docxtemplater.min.js  (generated, vendored, normal file)
File diff suppressed because one or more lines are too long

0  tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/constructorOptionsSchema.ts  (generated, vendored, normal file, empty)
229  tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/docxtemplater.d.ts  (generated, vendored, normal file)
@@ -0,0 +1,229 @@

[Vendored TypeScript declaration file for docxtemplater. It declares the DXT namespace (SimplePart, Part, ScopeManager, Rendered, Compression, ZipOptions, RenderOptions, Module, ParserContext, Parser, Syntax, Options, ConstructorOptions) and the Docxtemplater<TZip> class: constructor(zip, options?), setData, resolveData, render, renderAsync, getZip, loadZip, setOptions, attachModule, compile, getFullText, the module-related flags (targets, replaceFirstSection, replaceLastSection, includeSections, keepStyles, modules), and the output helpers toBuffer, toBlob, toBase64, toUint8Array and toArrayBuffer, each taking optional DXT.ZipOptions (compression, compressionOptions.level 1-9, comment, platform, encodeFileName, fileOrder).]
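As a worked illustration of the options surface summarized above (again a sketch, not code from this commit): custom `delimiters`, a `nullGetter` for tags missing from the data, and a DEFLATE-compressed output buffer via `ZipOptions`. Here `templateBuffer` is assumed to already hold the raw bytes of a .docx template, and the `"N/A"` fallback is illustrative.

```javascript
const PizZip = require("pizzip");
const Docxtemplater = require("docxtemplater");

const doc = new Docxtemplater(new PizZip(templateBuffer), {
  // DXT.Options.delimiters: use [[tag]] instead of {tag}.
  delimiters: { start: "[[", end: "]]" },
  // DXT.Options.nullGetter: decides what an unresolved tag renders as.
  nullGetter(part) {
    return part.module === "rawxml" ? "" : "N/A";
  },
});

doc.render({ title: "Delivery report" });

// DXT.ZipOptions: compression method and level (1-9) for the generated zip.
const buf = doc.toBuffer({
  compression: "DEFLATE",
  compressionOptions: { level: 6 },
});
```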
426
tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/docxtemplater.test-d.ts
generated
vendored
Normal file
426
tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/docxtemplater.test-d.ts
generated
vendored
Normal file
@@ -0,0 +1,426 @@
|
|||||||
|
import Docxtemplater, { DXT } from "./docxtemplater";
|
||||||
|
import InspectModule from "./inspect-module";
|
||||||
|
import expressionParser from "../expressions";
|
||||||
|
import ieExpressionParser from "../expressions-ie11";
|
||||||
|
import TxtTemplater from "./text";
|
||||||
|
const PizZip: any = require("pizzip");
|
||||||
|
import { expectType, expectError } from "tsd";
|
||||||
|
|
||||||
|
expressionParser.filters.map = function (input: any, key: any): any {
|
||||||
|
if (!input) {
|
||||||
|
return input;
|
||||||
|
}
|
||||||
|
|
||||||
|
if ("map" in input) {
|
||||||
|
return input.map(function (x: any) {
|
||||||
|
return x[key];
|
||||||
|
});
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
ieExpressionParser.filters.map = function (input: any, key: any): any {
|
||||||
|
if (!input) {
|
||||||
|
return input;
|
||||||
|
}
|
||||||
|
|
||||||
|
if ("map" in input) {
|
||||||
|
return input.map(function (x: any) {
|
||||||
|
return x[key];
|
||||||
|
});
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const tDoc = new TxtTemplater("Hello {#users}{name},{/users} how are you ?", {
|
||||||
|
parser: expressionParser,
|
||||||
|
});
|
||||||
|
tDoc.render({ users: [{ name: "John" }, { name: "Baz" }] });
|
||||||
|
|
||||||
|
const tDoc2 = new TxtTemplater("Hello {#users}{name},{/users} how are you ?", {
|
||||||
|
parser: expressionParser,
|
||||||
|
});
|
||||||
|
tDoc2
|
||||||
|
.renderAsync({ users: [{ name: "John" }, { name: "Baz" }] })
|
||||||
|
.then(function (result: any) {
|
||||||
|
console.log(result.toUpperCase());
|
||||||
|
});
|
||||||
|
|
||||||
|
const doc1 = new Docxtemplater(
|
||||||
|
{},
|
||||||
|
{
|
||||||
|
delimiters: { start: "[[", end: "]]" },
|
||||||
|
nullGetter: function (part) {
|
||||||
|
expectError(part.foobar);
|
||||||
|
if (part.module === "rawxml") {
|
||||||
|
return "";
|
||||||
|
}
|
||||||
|
if (part.type === "placeholder" && part.value === "foobar") {
|
||||||
|
return "{Foobar}";
|
||||||
|
}
|
||||||
|
return "Hello";
|
||||||
|
},
|
||||||
|
}
|
||||||
|
);
|
||||||
|
const iModule = new InspectModule();
|
||||||
|
doc1.setData({ foo: "bar" });
|
||||||
|
doc1.attachModule({
|
||||||
|
set: function () {},
|
||||||
|
parse: function (placeHolderContent) {
|
||||||
|
if (placeHolderContent.indexOf(":hello") === 0) {
|
||||||
|
return {
|
||||||
|
type: "placeholder",
|
||||||
|
module: "mycustomModule",
|
||||||
|
value: placeHolderContent.substr(7),
|
||||||
|
isEmpty: "foobar",
|
||||||
|
};
|
||||||
|
}
|
||||||
|
return null;
|
||||||
|
},
|
||||||
|
getFoobar: function () {},
|
||||||
|
});
|
||||||
|
doc1.attachModule(iModule);
|
||||||
|
const tags = iModule.getAllTags();
|
||||||
|
const tags2 = iModule.getAllStructuredTags();
|
||||||
|
const nullValues = iModule.fullInspected["word/document.xml"].nullValues;
|
||||||
|
const firstTag = nullValues.detail[0].part.value;
|
||||||
|
const scope = nullValues.detail[0].scopeManager.scopeList[0];
|
||||||
|
expectType<string>(firstTag);
|
||||||
|
doc1.render();
|
||||||
|
|
||||||
|
const buf: Buffer = doc1.toBuffer({
|
||||||
|
compression: "DEFLATE",
|
||||||
|
});
|
||||||
|
const blob: Blob = doc1.toBlob({
|
||||||
|
compression: "DEFLATE",
|
||||||
|
});
|
||||||
|
const str: string = doc1.toBase64({
|
||||||
|
compression: "DEFLATE",
|
||||||
|
});
|
||||||
|
const u8: Uint8Array = doc1.toUint8Array({
|
||||||
|
compression: "DEFLATE",
|
||||||
|
});
|
||||||
|
const ab: ArrayBuffer = doc1.toArrayBuffer({
|
||||||
|
compression: "DEFLATE",
|
||||||
|
});
|
||||||
|
|
||||||
|
new Docxtemplater(
|
||||||
|
{},
|
||||||
|
{
|
||||||
|
stripInvalidXMLChars: true,
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
new Docxtemplater(
|
||||||
|
{},
|
||||||
|
{
|
||||||
|
stripInvalidXMLChars: false,
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
new Docxtemplater(
|
||||||
|
{},
|
||||||
|
{
|
||||||
|
errorLogging: false,
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
new Docxtemplater(
|
||||||
|
{},
|
||||||
|
{
|
||||||
|
errorLogging: "jsonl",
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
new Docxtemplater(
|
||||||
|
{},
|
||||||
|
{
|
||||||
|
errorLogging: "json",
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
expectError(doc1.foobar());
|
||||||
|
expectError(new Docxtemplater(1, 2));
|
||||||
|
expectError(new Docxtemplater({}, { delimiters: { start: 1, end: "]]" } }));
|
||||||
|
expectError(new Docxtemplater({}, { delimiters: { start: "[[" } }));
|
||||||
|
expectError(new Docxtemplater({}, { stripInvalidXMLChars: "yo" }));
|
||||||
|
|
||||||
|
const doc2 = new Docxtemplater();
|
||||||
|
doc2.loadZip(new PizZip("hello"));
|
||||||
|
|
||||||
|
// Error because parser should return a {get: fn} object
|
||||||
|
expectError(
|
||||||
|
doc2.setOptions({
|
||||||
|
parser: function (tag) {
|
||||||
|
return 10;
|
||||||
|
},
|
||||||
|
})
|
||||||
|
);
|
||||||
|
|
||||||
|
doc2.setOptions({
|
||||||
|
parser: function (tag) {
|
||||||
|
expectType<string>(tag);
|
||||||
|
return {
|
||||||
|
get: function (scope, context) {
|
||||||
|
const first = context.scopeList[0];
|
||||||
|
expectType<DXT.integer>(context.num);
|
||||||
|
expectError(context.foobar);
|
||||||
|
if (context.meta.part.value === tag) {
|
||||||
|
return scope[context.meta.part.value];
|
||||||
|
}
|
||||||
|
expectError(context.meta.part.other);
|
||||||
|
return scope[tag];
|
||||||
|
},
|
||||||
|
};
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
const doc3 = new Docxtemplater();
doc3.loadZip(new PizZip("hello"));
doc3.compile();
doc3.resolveData({ a: "b" }).then(function () {
doc3.render();
});
doc3.replaceFirstSection = true;
doc3.replaceLastSection = true;
const doc4 = new Docxtemplater(new PizZip("hello"));
doc4.renderAsync({ a: "b" }).then(function () {
console.log("end");
});
const text = doc3.getFullText();
const text2 = doc3.getFullText("word/heading1.xml");

new Docxtemplater(new PizZip("hello"), { errorLogging: false });

// Error because getFullText requires a string parameter
expectError(doc3.getFullText(false));
expectError(doc3.getFullText(10));

const doc5 = new Docxtemplater(new PizZip("hello"), {
parser: expressionParser,
});

const doc6 = new Docxtemplater(new PizZip("hello"), {
parser: ieExpressionParser,
});

const doc7 = new Docxtemplater(new PizZip("hello"), {
parser: expressionParser.configure({
filters: {
foo: (a: any) => a,
bar: (a: any) => a,
},
csp: true,
cache: {},
literals: { true: true },
}),
});

const doc8 = new Docxtemplater(new PizZip("hello"), {
parser: ieExpressionParser.configure({
filters: {
foo: (a: any) => a,
bar: (a: any) => a,
},
csp: true,
cache: {},
literals: { true: true },
}),
});

const doc9 = new Docxtemplater(new PizZip("hello"), {
syntax: {
allowUnopenedTag: true,
allowUnclosedTag: true,
changeDelimiterPrefix: null,
},
});

const doc10 = new Docxtemplater(new PizZip("hello"), {
syntax: {
allowUnopenedTag: true,
changeDelimiterPrefix: "",
},
});

function validStartChars(ch: string): boolean {
return /[a-z]/.test(ch);
}
function validContinuationChars(ch: string): boolean {
return /[a-z]/.test(ch);
}
expressionParser.configure({
isIdentifierStart: validStartChars,
isIdentifierContinue: validContinuationChars,
});
ieExpressionParser.configure({
isIdentifierStart: validStartChars,
isIdentifierContinue: validContinuationChars,
});

expressionParser.configure({
evaluateIdentifier(
tag: string,
scope: any,
scopeList: any[],
context: any
) {
let res = context.num + context.num;
return res;
},
});

expressionParser.configure({
setIdentifier(
tag: string,
value: any,
scope: any,
scopeList: any[],
context: any
) {
scopeList[0][tag] = value;
return true;
},
});

expressionParser.configure({
postEvaluate(
result: any,
tag: string,
scope: any,
context: DXT.ParserContext
) {
return result;
},
});

ieExpressionParser.configure({
postEvaluate(
result: any,
tag: string,
scope: any,
context: DXT.ParserContext
) {
return result;
},
});

// Define the parameter type for getFileType
interface FileTypeParams {
doc: Docxtemplater;
}

const avoidRenderingCoreXMLModule = {
name: "avoidRenderingCoreXMLModule",
getFileType({ doc }: FileTypeParams): void {
doc.targets = doc.targets.filter(function (file: string) {
if (
file === "docProps/core.xml" ||
file === "docProps/app.xml" ||
file === "docProps/custom.xml"
) {
return false;
}
return true;
});
},
};
new Docxtemplater(new PizZip("hello"), {
modules: [
avoidRenderingCoreXMLModule,
{
optionsTransformer(options, doc) {
doc.modules.forEach(function (module) {
if (module.name === "LoopModule") {
module.prefix.start = "FOR ";
module.prefix.start = "ENDFOR ";
}
});
return options;
},
render(part, options) {
if (part.type === "placeholder") {
let value = options.scopeManager.getValue(part.value, {
part,
});
return value;
}
return null;
},
},
],
paragraphLoop: true,
linebreaks: true,
});

interface SetOptions {
Lexer: any;
zip: any;
}
const fixDocPrCorruptionModule: DXT.Module = {
set(options: SetOptions) {
if (options.Lexer) {
this.Lexer = options.Lexer;
}
if (options.zip) {
this.zip = options.zip;
}
},
on(event) {
if (event === "attached") {
this.attached = false;
}
if (event !== "syncing-zip") {
return;
}
const zip = this.zip;
const Lexer = this.Lexer;
let prId = 1;
function setSingleAttribute(
partValue: string,
attr: string,
attrValue: string | number
) {
const regex = new RegExp(`(<.* ${attr}=")([^"]+)(".*)$`);
if (regex.test(partValue)) {
return partValue.replace(regex, `$1${attrValue}$3`);
}
let end = partValue.lastIndexOf("/>");
if (end === -1) {
end = partValue.lastIndexOf(">");
}
return (
partValue.substr(0, end) +
` ${attr}="${attrValue}"` +
partValue.substr(end)
);
}
for (const f of zip.file(/\.xml$/)) {
let text = f.asText();
const xmllexed = Lexer.xmlparse(text, {
text: [],
other: ["wp:docPr"],
});
if (xmllexed.length > 1) {
text = xmllexed.reduce(function (
fullText: string,
part: DXT.Part
) {
if (
part.tag === "wp:docPr" &&
part.position &&
["start", "selfclosing"].indexOf(part.position) !== -1
) {
return (
fullText +
setSingleAttribute(part.value, "id", prId++)
);
}
return fullText + part.value;
}, "");
}
zip.file(f.name, text);
}
},
};
new Docxtemplater(new PizZip("hello"), {
modules: [fixDocPrCorruptionModule],
});
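The type tests above walk the whole public surface: constructor options, output methods, custom parsers and lifecycle modules. As a minimal end-to-end sketch (assuming a real .docx template on disk; the file names and the data object below are illustrative, not taken from this diff):

```ts
// Minimal sketch, assuming a real .docx template on disk.
// "template.docx", "output.docx" and the data object are placeholders.
import * as fs from "fs";
import PizZip from "pizzip";
import Docxtemplater from "docxtemplater";

const zip = new PizZip(fs.readFileSync("template.docx", "binary"));
const doc = new Docxtemplater(zip, { paragraphLoop: true, linebreaks: true });

// render() fills {placeholders} from the data object
doc.render({ name: "John" });

// toBuffer() serializes the zip back into a .docx buffer
fs.writeFileSync("output.docx", doc.toBuffer({ compression: "DEFLATE" }));
```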
32
tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/expressions-ie11.d.ts
generated
vendored
Normal file
@@ -0,0 +1,32 @@
import { DXT } from "./js/docxtemplater";

interface ParserOptions {
filters?: { [x: string]: (input: any, ...filters: any[]) => any };
csp?: boolean;
cache?: any;
literals?: { [x: string]: any };
isIdentifierStart?: (char: string) => boolean;
isIdentifierContinue?: (char: string) => boolean;
handleDotThis?: boolean;
postEvaluate?: (
result: any,
tag: string,
scope: any,
context: DXT.ParserContext
) => any;
}

interface ExpressionParser extends DXT.Parser {
compiled: any;
getIdentifiers(): string[];
getObjectIdentifiers(): any;
}

type Parser = {
(tag: string): ExpressionParser;
filters: { [x: string]: (input: any, ...filters: any[]) => any };
configure: (options: ParserOptions) => (tag: string) => DXT.Parser;
};

declare var expressionParser: Parser;
export default expressionParser;
192
tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/expressions-ie11.js
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
45
tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/expressions.d.ts
generated
vendored
Normal file
@@ -0,0 +1,45 @@
import { DXT } from "./js/docxtemplater";

interface ParserOptions {
filters?: { [x: string]: (input: any, ...filters: any[]) => any };
csp?: boolean;
cache?: any;
literals?: { [x: string]: any };
isIdentifierStart?: (char: string) => boolean;
isIdentifierContinue?: (char: string) => boolean;
handleDotThis?: boolean;
postEvaluate?: (
result: any,
tag: string,
scope: any,
context: DXT.ParserContext
) => any;
evaluateIdentifier?: (
tag: string,
scope: any,
scopeList: any[],
context: DXT.ParserContext
) => any;
setIdentifier?: (
tag: string,
value: any,
scope: any,
scopeList: any[],
context: DXT.ParserContext
) => any;
}

interface ExpressionParser extends DXT.Parser {
compiled: any;
getIdentifiers(): string[];
getObjectIdentifiers(): any;
}

type Parser = {
(tag: string): ExpressionParser;
filters: { [x: string]: (input: any, ...filters: any[]) => any };
configure: (options: ParserOptions) => (tag: string) => ExpressionParser;
};

declare var expressionParser: Parser;
export default expressionParser;
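The declaration above exposes `configure()` for the expression parser; a hedged sketch of registering a filter and handing the configured parser to the constructor. The import path, the `upper` filter name and the `{name | upper}` tag syntax are illustrative assumptions, mirroring the `doc7`/`doc8` type tests earlier in this diff.

```ts
// Hedged sketch: configure() returns a parser factory accepted as the
// `parser` option; the filter name and tag syntax are illustrative.
import PizZip from "pizzip";
import Docxtemplater from "docxtemplater";
import expressionParser from "docxtemplater/expressions";

const parser = expressionParser.configure({
    filters: {
        // usable in a tag such as {name | upper} (assumed syntax)
        upper: (input: any) =>
            typeof input === "string" ? input.toUpperCase() : input,
    },
});

const doc = new Docxtemplater(new PizZip("hello"), { parser });
```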
393
tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/expressions.js
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
29
tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/inspect-module.d.ts
generated
vendored
Normal file
@@ -0,0 +1,29 @@
// Type definitions for Docxtemplater 3
// Project: https://github.com/open-xml-templating/docxtemplater/
// Definitions by: edi9999 <https://github.com/edi9999>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
// TypeScript Version: 3.9

import { DXT } from "./docxtemplater";
export default class InspectModule implements DXT.Module {
constructor();
getAllTags(): Record<string, unknown>;
getTags(file?: string): Record<string, unknown>;
fullInspected: Record<
string,
{
nullValues: {
detail: {
part: DXT.Part;
scopeManager: DXT.ScopeManager;
}[];
summary: string[][];
};
}
>;

getStructuredTags(file?: string): DXT.Part[];
getAllStructuredTags(): DXT.Part[];
getFileType(): string;
getTemplatedFiles(): string[];
}
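`InspectModule` is what the `iModule` calls near the top of this diff rely on (`getAllTags`, `fullInspected`). A compact sketch, assuming `zip` already holds a loaded template; the import path mirrors the vendored location above:

```ts
// Hedged sketch: list the tags a template uses.
// Assumes `zip` already holds a loaded .docx template.
import Docxtemplater from "docxtemplater";
import InspectModule from "docxtemplater/inspect-module";

declare const zip: any; // assumed to be a PizZip instance

const iModule = new InspectModule();
const doc = new Docxtemplater(zip, { modules: [iModule] });

// e.g. { user: {}, items: {} } for a template containing {user} and {items}
const tags = iModule.getAllTags();
```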
3
tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/js/browser-versions/fs.js
generated
vendored
Normal file
@@ -0,0 +1,3 @@
"use strict";

module.exports = {};
7
tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/js/browser-versions/xmldom.js
generated
vendored
Normal file
@@ -0,0 +1,7 @@
"use strict";

module.exports = {
XMLSerializer: window.XMLSerializer,
DOMParser: window.DOMParser,
XMLDocument: window.XMLDocument
};
16
tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/js/content-types.js
generated
vendored
Normal file
@@ -0,0 +1,16 @@
"use strict";

var coreContentType = "application/vnd.openxmlformats-package.core-properties+xml";
var appContentType = "application/vnd.openxmlformats-officedocument.extended-properties+xml";
var customContentType = "application/vnd.openxmlformats-officedocument.custom-properties+xml";
var settingsContentType = "application/vnd.openxmlformats-officedocument.wordprocessingml.settings+xml";
var diagramDataContentType = "application/vnd.openxmlformats-officedocument.drawingml.diagramData+xml";
var diagramDrawingContentType = "application/vnd.ms-office.drawingml.diagramDrawing+xml";
module.exports = {
settingsContentType: settingsContentType,
coreContentType: coreContentType,
appContentType: appContentType,
customContentType: customContentType,
diagramDataContentType: diagramDataContentType,
diagramDrawingContentType: diagramDrawingContentType
};
60
tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/js/debugger-module.js
generated
vendored
Normal file
@@ -0,0 +1,60 @@
|
|||||||
|
"use strict";
|
||||||
|
|
||||||
|
function _typeof(o) { "@babel/helpers - typeof"; return _typeof = "function" == typeof Symbol && "symbol" == typeof Symbol.iterator ? function (o) { return typeof o; } : function (o) { return o && "function" == typeof Symbol && o.constructor === Symbol && o !== Symbol.prototype ? "symbol" : typeof o; }, _typeof(o); }
|
||||||
|
function _classCallCheck(a, n) { if (!(a instanceof n)) throw new TypeError("Cannot call a class as a function"); }
|
||||||
|
function _defineProperties(e, r) { for (var t = 0; t < r.length; t++) { var o = r[t]; o.enumerable = o.enumerable || !1, o.configurable = !0, "value" in o && (o.writable = !0), Object.defineProperty(e, _toPropertyKey(o.key), o); } }
|
||||||
|
function _createClass(e, r, t) { return r && _defineProperties(e.prototype, r), t && _defineProperties(e, t), Object.defineProperty(e, "prototype", { writable: !1 }), e; }
|
||||||
|
function _toPropertyKey(t) { var i = _toPrimitive(t, "string"); return "symbol" == _typeof(i) ? i : i + ""; }
|
||||||
|
function _toPrimitive(t, r) { if ("object" != _typeof(t) || !t) return t; var e = t[Symbol.toPrimitive]; if (void 0 !== e) { var i = e.call(t, r || "default"); if ("object" != _typeof(i)) return i; throw new TypeError("@@toPrimitive must return a primitive value."); } return ("string" === r ? String : Number)(t); }
|
||||||
|
/* eslint-disable no-console */
|
||||||
|
module.exports = /*#__PURE__*/function () {
|
||||||
|
function DebuggerModule() {
|
||||||
|
_classCallCheck(this, DebuggerModule);
|
||||||
|
}
|
||||||
|
return _createClass(DebuggerModule, [{
|
||||||
|
key: "optionsTransformer",
|
||||||
|
value: function optionsTransformer(options, docxtemplater) {
|
||||||
|
console.log(JSON.stringify({
|
||||||
|
options: options
|
||||||
|
}));
|
||||||
|
console.log(JSON.stringify({
|
||||||
|
files: Object.keys(docxtemplater.getZip().files)
|
||||||
|
}));
|
||||||
|
return options;
|
||||||
|
}
|
||||||
|
}, {
|
||||||
|
key: "parse",
|
||||||
|
value: function parse() {
|
||||||
|
console.log(JSON.stringify({
|
||||||
|
msg: "parse"
|
||||||
|
}));
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}, {
|
||||||
|
key: "postparse",
|
||||||
|
value: function postparse(parsed) {
|
||||||
|
console.log(JSON.stringify({
|
||||||
|
msg: "postparse"
|
||||||
|
}));
|
||||||
|
return {
|
||||||
|
errors: [],
|
||||||
|
parsed: parsed
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}, {
|
||||||
|
key: "render",
|
||||||
|
value: function render() {
|
||||||
|
console.log(JSON.stringify({
|
||||||
|
msg: "render"
|
||||||
|
}));
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}, {
|
||||||
|
key: "postrender",
|
||||||
|
value: function postrender() {
|
||||||
|
console.log(JSON.stringify({
|
||||||
|
msg: "postrender"
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
}]);
|
||||||
|
}();
|
||||||
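The debugger module above simply logs each lifecycle hook (`optionsTransformer`, `parse`, `postparse`, `render`, `postrender`) as JSON. A hedged sketch of attaching it; the require path mirrors this vendored location and may differ between builds, and `templateBinary` is an assumed placeholder:

```ts
// Hedged sketch: attach the logging DebuggerModule shown above.
import PizZip from "pizzip";
import Docxtemplater from "docxtemplater";

declare const templateBinary: string; // assumed .docx contents read elsewhere
// Path assumption: the vendored file above lives at js/debugger-module.js
const DebuggerModule = require("docxtemplater/js/debugger-module.js");

const doc = new Docxtemplater(new PizZip(templateBinary), {
    modules: [new DebuggerModule()],
});
```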
475
tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/js/doc-utils.js
generated
vendored
Normal file
@@ -0,0 +1,475 @@
|
|||||||
|
"use strict";
|
||||||
|
|
||||||
|
function _slicedToArray(r, e) { return _arrayWithHoles(r) || _iterableToArrayLimit(r, e) || _unsupportedIterableToArray(r, e) || _nonIterableRest(); }
|
||||||
|
function _nonIterableRest() { throw new TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); }
|
||||||
|
function _unsupportedIterableToArray(r, a) { if (r) { if ("string" == typeof r) return _arrayLikeToArray(r, a); var t = {}.toString.call(r).slice(8, -1); return "Object" === t && r.constructor && (t = r.constructor.name), "Map" === t || "Set" === t ? Array.from(r) : "Arguments" === t || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(t) ? _arrayLikeToArray(r, a) : void 0; } }
|
||||||
|
function _arrayLikeToArray(r, a) { (null == a || a > r.length) && (a = r.length); for (var e = 0, n = Array(a); e < a; e++) n[e] = r[e]; return n; }
|
||||||
|
function _iterableToArrayLimit(r, l) { var t = null == r ? null : "undefined" != typeof Symbol && r[Symbol.iterator] || r["@@iterator"]; if (null != t) { var e, n, i, u, a = [], f = !0, o = !1; try { if (i = (t = t.call(r)).next, 0 === l) { if (Object(t) !== t) return; f = !1; } else for (; !(f = (e = i.call(t)).done) && (a.push(e.value), a.length !== l); f = !0); } catch (r) { o = !0, n = r; } finally { try { if (!f && null != t["return"] && (u = t["return"](), Object(u) !== u)) return; } finally { if (o) throw n; } } return a; } }
|
||||||
|
function _arrayWithHoles(r) { if (Array.isArray(r)) return r; }
|
||||||
|
var _require = require("@xmldom/xmldom"),
|
||||||
|
DOMParser = _require.DOMParser,
|
||||||
|
XMLSerializer = _require.XMLSerializer;
|
||||||
|
var _require2 = require("./errors.js"),
|
||||||
|
throwXmlTagNotFound = _require2.throwXmlTagNotFound;
|
||||||
|
var _require3 = require("./utils.js"),
|
||||||
|
last = _require3.last,
|
||||||
|
first = _require3.first;
|
||||||
|
function isWhiteSpace(value) {
|
||||||
|
return /^[ \n\r\t]+$/.test(value);
|
||||||
|
}
|
||||||
|
function parser(tag) {
|
||||||
|
return {
|
||||||
|
get: function get(scope) {
|
||||||
|
if (tag === ".") {
|
||||||
|
return scope;
|
||||||
|
}
|
||||||
|
if (scope) {
|
||||||
|
return scope[tag];
|
||||||
|
}
|
||||||
|
return scope;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
var attrToRegex = {};
|
||||||
|
function setSingleAttribute(partValue, attr, attrValue) {
|
||||||
|
var regex;
|
||||||
|
// Stryker disable next-line all : because this is an optimisation
|
||||||
|
if (attrToRegex[attr]) {
|
||||||
|
regex = attrToRegex[attr];
|
||||||
|
} else {
|
||||||
|
regex = new RegExp("(<.* ".concat(attr, "=\")([^\"]*)(\".*)$"));
|
||||||
|
attrToRegex[attr] = regex;
|
||||||
|
}
|
||||||
|
if (regex.test(partValue)) {
|
||||||
|
return partValue.replace(regex, "$1".concat(attrValue, "$3"));
|
||||||
|
}
|
||||||
|
var end = partValue.lastIndexOf("/>");
|
||||||
|
if (end === -1) {
|
||||||
|
end = partValue.lastIndexOf(">");
|
||||||
|
}
|
||||||
|
return partValue.substr(0, end) + " ".concat(attr, "=\"").concat(attrValue, "\"") + partValue.substr(end);
|
||||||
|
}
|
||||||
|
function getSingleAttribute(value, attributeName) {
|
||||||
|
var index = value.indexOf(" ".concat(attributeName, "=\""));
|
||||||
|
if (index === -1) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
var startIndex = value.substr(index).search(/["']/) + index;
|
||||||
|
var endIndex = value.substr(startIndex + 1).search(/["']/) + startIndex;
|
||||||
|
return value.substr(startIndex + 1, endIndex - startIndex);
|
||||||
|
}
|
||||||
|
function endsWith(str, suffix) {
|
||||||
|
return str.indexOf(suffix, str.length - suffix.length) !== -1;
|
||||||
|
}
|
||||||
|
function startsWith(str, prefix) {
|
||||||
|
return str.substring(0, prefix.length) === prefix;
|
||||||
|
}
|
||||||
|
function getDuplicates(arr) {
|
||||||
|
var duplicates = [];
|
||||||
|
var hash = {},
|
||||||
|
result = [];
|
||||||
|
for (var i = 0, l = arr.length; i < l; ++i) {
|
||||||
|
if (!hash[arr[i]]) {
|
||||||
|
hash[arr[i]] = true;
|
||||||
|
result.push(arr[i]);
|
||||||
|
} else {
|
||||||
|
duplicates.push(arr[i]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return duplicates;
|
||||||
|
}
|
||||||
|
function uniq(arr) {
|
||||||
|
var hash = {},
|
||||||
|
result = [];
|
||||||
|
for (var i = 0, l = arr.length; i < l; ++i) {
|
||||||
|
if (!hash[arr[i]]) {
|
||||||
|
hash[arr[i]] = true;
|
||||||
|
result.push(arr[i]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
function chunkBy(parsed, f) {
|
||||||
|
var chunks = [[]];
|
||||||
|
for (var _i2 = 0; _i2 < parsed.length; _i2++) {
|
||||||
|
var p = parsed[_i2];
|
||||||
|
var currentChunk = chunks[chunks.length - 1];
|
||||||
|
var res = f(p);
|
||||||
|
if (res === "start") {
|
||||||
|
chunks.push([p]);
|
||||||
|
} else if (res === "end") {
|
||||||
|
currentChunk.push(p);
|
||||||
|
chunks.push([]);
|
||||||
|
} else {
|
||||||
|
currentChunk.push(p);
|
||||||
|
}
|
||||||
|
} // Remove empty chunks
|
||||||
|
var result = [];
|
||||||
|
for (var _i4 = 0; _i4 < chunks.length; _i4++) {
|
||||||
|
var chunk = chunks[_i4];
|
||||||
|
if (chunk.length > 0) {
|
||||||
|
result.push(chunk);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
function getDefaults() {
|
||||||
|
return {
|
||||||
|
errorLogging: "json",
|
||||||
|
stripInvalidXMLChars: false,
|
||||||
|
paragraphLoop: false,
|
||||||
|
nullGetter: function nullGetter(part) {
|
||||||
|
return part.module ? "" : "undefined";
|
||||||
|
},
|
||||||
|
xmlFileNames: ["[Content_Types].xml"],
|
||||||
|
parser: parser,
|
||||||
|
linebreaks: false,
|
||||||
|
fileTypeConfig: null,
|
||||||
|
delimiters: {
|
||||||
|
start: "{",
|
||||||
|
end: "}"
|
||||||
|
},
|
||||||
|
syntax: {
|
||||||
|
changeDelimiterPrefix: "="
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
function xml2str(xmlNode) {
|
||||||
|
return new XMLSerializer().serializeToString(xmlNode).replace(/xmlns(:[a-z0-9]+)?="" ?/g, "");
|
||||||
|
}
|
||||||
|
function str2xml(str) {
|
||||||
|
if (str.charCodeAt(0) === 65279) {
|
||||||
|
// BOM sequence
|
||||||
|
str = str.substr(1);
|
||||||
|
}
|
||||||
|
return new DOMParser().parseFromString(str, "text/xml");
|
||||||
|
}
|
||||||
|
var charMap = [["&", "&amp;"], ["<", "&lt;"], [">", "&gt;"], ['"', "&quot;"], ["'", "&apos;"]];
|
||||||
|
var charMapRegexes = charMap.map(function (_ref) {
|
||||||
|
var _ref2 = _slicedToArray(_ref, 2),
|
||||||
|
endChar = _ref2[0],
|
||||||
|
startChar = _ref2[1];
|
||||||
|
return {
|
||||||
|
rstart: new RegExp(startChar, "g"),
|
||||||
|
rend: new RegExp(endChar, "g"),
|
||||||
|
start: startChar,
|
||||||
|
end: endChar
|
||||||
|
};
|
||||||
|
});
|
||||||
|
function wordToUtf8(string) {
|
||||||
|
for (var i = charMapRegexes.length - 1; i >= 0; i--) {
|
||||||
|
var r = charMapRegexes[i];
|
||||||
|
string = string.replace(r.rstart, r.end);
|
||||||
|
}
|
||||||
|
return string;
|
||||||
|
}
|
||||||
|
function utf8ToWord(string) {
|
||||||
|
var _string;
|
||||||
|
if ((_string = string) !== null && _string !== void 0 && _string.toString) {
|
||||||
|
// To make sure that the object given is a string (this is a noop for strings).
|
||||||
|
string = string.toString();
|
||||||
|
} else {
|
||||||
|
string = "";
|
||||||
|
}
|
||||||
|
var r;
|
||||||
|
for (var i = 0, l = charMapRegexes.length; i < l; i++) {
|
||||||
|
r = charMapRegexes[i];
|
||||||
|
string = string.replace(r.rend, r.start);
|
||||||
|
}
|
||||||
|
return string;
|
||||||
|
}
|
||||||
|
|
||||||
|
// This function is written with for loops for performance
|
||||||
|
function concatArrays(arrays) {
|
||||||
|
var result = [];
|
||||||
|
for (var _i6 = 0; _i6 < arrays.length; _i6++) {
|
||||||
|
var array = arrays[_i6];
|
||||||
|
for (var _i8 = 0; _i8 < array.length; _i8++) {
|
||||||
|
var el = array[_i8];
|
||||||
|
result.push(el);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
function pushArray(array1, array2) {
|
||||||
|
if (!array2) {
|
||||||
|
return array1;
|
||||||
|
}
|
||||||
|
for (var i = 0, len = array2.length; i < len; i++) {
|
||||||
|
array1.push(array2[i]);
|
||||||
|
}
|
||||||
|
return array1;
|
||||||
|
}
|
||||||
|
var spaceRegexp = new RegExp(String.fromCharCode(160), "g");
|
||||||
|
function convertSpaces(s) {
|
||||||
|
return s.replace(spaceRegexp, " ");
|
||||||
|
}
|
||||||
|
function pregMatchAll(regex, content) {
|
||||||
|
/*
|
||||||
|
* Regex is a string, content is the content. It returns an array of all
|
||||||
|
* matches with their offset, for example:
|
||||||
|
*
|
||||||
|
* regex=la
|
||||||
|
* content=lolalolilala
|
||||||
|
*
|
||||||
|
* Returns:
|
||||||
|
*
|
||||||
|
* [
|
||||||
|
* {array: {0: 'la'}, offset: 2},
|
||||||
|
* {array: {0: 'la'}, offset: 8},
|
||||||
|
* {array: {0: 'la'}, offset: 10}
|
||||||
|
* ]
|
||||||
|
*/
|
||||||
|
var matchArray = [];
|
||||||
|
var match;
|
||||||
|
while ((match = regex.exec(content)) != null) {
|
||||||
|
matchArray.push({
|
||||||
|
array: match,
|
||||||
|
offset: match.index
|
||||||
|
});
|
||||||
|
}
|
||||||
|
return matchArray;
|
||||||
|
}
|
||||||
|
function isEnding(value, element) {
|
||||||
|
return value === "</" + element + ">";
|
||||||
|
}
|
||||||
|
function isStarting(value, element) {
|
||||||
|
return value.indexOf("<" + element) === 0 && [">", " ", "/"].indexOf(value[element.length + 1]) !== -1;
|
||||||
|
}
|
||||||
|
function getRight(parsed, element, index) {
|
||||||
|
var val = getRightOrNull(parsed, element, index);
|
||||||
|
if (val !== null) {
|
||||||
|
return val;
|
||||||
|
}
|
||||||
|
throwXmlTagNotFound({
|
||||||
|
position: "right",
|
||||||
|
element: element,
|
||||||
|
parsed: parsed,
|
||||||
|
index: index
|
||||||
|
});
|
||||||
|
}
|
||||||
|
function getRightOrNull(parsed, elements, index) {
|
||||||
|
if (typeof elements === "string") {
|
||||||
|
elements = [elements];
|
||||||
|
}
|
||||||
|
var level = 1;
|
||||||
|
for (var i = index, l = parsed.length; i < l; i++) {
|
||||||
|
var part = parsed[i];
|
||||||
|
for (var _i0 = 0, _elements2 = elements; _i0 < _elements2.length; _i0++) {
|
||||||
|
var element = _elements2[_i0];
|
||||||
|
if (isEnding(part.value, element)) {
|
||||||
|
level--;
|
||||||
|
}
|
||||||
|
if (isStarting(part.value, element)) {
|
||||||
|
level++;
|
||||||
|
}
|
||||||
|
if (level === 0) {
|
||||||
|
return i;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
function getLeft(parsed, element, index) {
|
||||||
|
var val = getLeftOrNull(parsed, element, index);
|
||||||
|
if (val !== null) {
|
||||||
|
return val;
|
||||||
|
}
|
||||||
|
throwXmlTagNotFound({
|
||||||
|
position: "left",
|
||||||
|
element: element,
|
||||||
|
parsed: parsed,
|
||||||
|
index: index
|
||||||
|
});
|
||||||
|
}
|
||||||
|
function getLeftOrNull(parsed, elements, index) {
|
||||||
|
if (typeof elements === "string") {
|
||||||
|
elements = [elements];
|
||||||
|
}
|
||||||
|
var level = 1;
|
||||||
|
for (var i = index; i >= 0; i--) {
|
||||||
|
var part = parsed[i];
|
||||||
|
for (var _i10 = 0, _elements4 = elements; _i10 < _elements4.length; _i10++) {
|
||||||
|
var element = _elements4[_i10];
|
||||||
|
if (isStarting(part.value, element)) {
|
||||||
|
level--;
|
||||||
|
}
|
||||||
|
if (isEnding(part.value, element)) {
|
||||||
|
level++;
|
||||||
|
}
|
||||||
|
if (level === 0) {
|
||||||
|
return i;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Stryker disable all : because those are functions that depend on the parsed
|
||||||
|
* structure based and we don't want minimal code here, but rather code that
|
||||||
|
* makes things clear.
|
||||||
|
*/
|
||||||
|
function isTagStart(tagType, _ref3) {
|
||||||
|
var type = _ref3.type,
|
||||||
|
tag = _ref3.tag,
|
||||||
|
position = _ref3.position;
|
||||||
|
return type === "tag" && tag === tagType && (position === "start" || position === "selfclosing");
|
||||||
|
}
|
||||||
|
function isTagEnd(tagType, _ref4) {
|
||||||
|
var type = _ref4.type,
|
||||||
|
tag = _ref4.tag,
|
||||||
|
position = _ref4.position;
|
||||||
|
return type === "tag" && tag === tagType && position === "end";
|
||||||
|
}
|
||||||
|
function isParagraphStart(_ref5) {
|
||||||
|
var type = _ref5.type,
|
||||||
|
tag = _ref5.tag,
|
||||||
|
position = _ref5.position;
|
||||||
|
return ["w:p", "a:p", "text:p"].indexOf(tag) !== -1 && type === "tag" && position === "start";
|
||||||
|
}
|
||||||
|
function isParagraphEnd(_ref6) {
|
||||||
|
var type = _ref6.type,
|
||||||
|
tag = _ref6.tag,
|
||||||
|
position = _ref6.position;
|
||||||
|
return ["w:p", "a:p", "text:p"].indexOf(tag) !== -1 && type === "tag" && position === "end";
|
||||||
|
}
|
||||||
|
function isTextStart(_ref7) {
|
||||||
|
var type = _ref7.type,
|
||||||
|
position = _ref7.position,
|
||||||
|
text = _ref7.text;
|
||||||
|
return text && type === "tag" && position === "start";
|
||||||
|
}
|
||||||
|
function isTextEnd(_ref8) {
|
||||||
|
var type = _ref8.type,
|
||||||
|
position = _ref8.position,
|
||||||
|
text = _ref8.text;
|
||||||
|
return text && type === "tag" && position === "end";
|
||||||
|
}
|
||||||
|
function isContent(_ref9) {
|
||||||
|
var type = _ref9.type,
|
||||||
|
position = _ref9.position;
|
||||||
|
return type === "placeholder" || type === "content" && position === "insidetag";
|
||||||
|
}
|
||||||
|
function isModule(_ref0, modules) {
|
||||||
|
var module = _ref0.module,
|
||||||
|
type = _ref0.type;
|
||||||
|
if (!(modules instanceof Array)) {
|
||||||
|
modules = [modules];
|
||||||
|
}
|
||||||
|
return type === "placeholder" && modules.indexOf(module) !== -1;
|
||||||
|
}
|
||||||
|
// Stryker restore all
|
||||||
|
|
||||||
|
var corruptCharacters = /[\x00-\x08\x0B\x0C\x0E-\x1F]/g;
|
||||||
|
/*
|
||||||
|
* 00 NUL '\0' (null character)
|
||||||
|
* 01 SOH (start of heading)
|
||||||
|
* 02 STX (start of text)
|
||||||
|
* 03 ETX (end of text)
|
||||||
|
* 04 EOT (end of transmission)
|
||||||
|
* 05 ENQ (enquiry)
|
||||||
|
* 06 ACK (acknowledge)
|
||||||
|
* 07 BEL '\a' (bell)
|
||||||
|
* 08 BS '\b' (backspace)
|
||||||
|
* 0B VT '\v' (vertical tab)
|
||||||
|
* 0C FF '\f' (form feed)
|
||||||
|
* 0E SO (shift out)
|
||||||
|
* 0F SI (shift in)
|
||||||
|
* 10 DLE (data link escape)
|
||||||
|
* 11 DC1 (device control 1)
|
||||||
|
* 12 DC2 (device control 2)
|
||||||
|
* 13 DC3 (device control 3)
|
||||||
|
* 14 DC4 (device control 4)
|
||||||
|
* 15 NAK (negative ack.)
|
||||||
|
* 16 SYN (synchronous idle)
|
||||||
|
* 17 ETB (end of trans. blk)
|
||||||
|
* 18 CAN (cancel)
|
||||||
|
* 19 EM (end of medium)
|
||||||
|
* 1A SUB (substitute)
|
||||||
|
* 1B ESC (escape)
|
||||||
|
* 1C FS (file separator)
|
||||||
|
* 1D GS (group separator)
|
||||||
|
* 1E RS (record separator)
|
||||||
|
* 1F US (unit separator)
|
||||||
|
*/
|
||||||
|
function hasCorruptCharacters(string) {
|
||||||
|
corruptCharacters.lastIndex = 0;
|
||||||
|
return corruptCharacters.test(string);
|
||||||
|
}
|
||||||
|
function removeCorruptCharacters(string) {
|
||||||
|
if (typeof string !== "string") {
|
||||||
|
string = String(string);
|
||||||
|
}
|
||||||
|
return string.replace(corruptCharacters, "");
|
||||||
|
}
|
||||||
|
function invertMap(map) {
|
||||||
|
var invertedMap = {};
|
||||||
|
for (var key in map) {
|
||||||
|
var value = map[key];
|
||||||
|
invertedMap[value] || (invertedMap[value] = []);
|
||||||
|
invertedMap[value].push(key);
|
||||||
|
}
|
||||||
|
return invertedMap;
|
||||||
|
}
|
||||||
|
/*
|
||||||
|
* This ensures that the sort is stable. The default Array.sort of the browser
|
||||||
|
* is not stable in firefox, as the JS spec does not enforce the sort to be
|
||||||
|
* stable.
|
||||||
|
*/
|
||||||
|
function stableSort(arr, compare) {
|
||||||
|
// Stryker disable all : in previous versions of Chrome, sort was not stable by itself, so we had to add this. This is to support older versions of JS runners.
|
||||||
|
return arr.map(function (item, index) {
|
||||||
|
return {
|
||||||
|
item: item,
|
||||||
|
index: index
|
||||||
|
};
|
||||||
|
}).sort(function (a, b) {
|
||||||
|
return compare(a.item, b.item) || a.index - b.index;
|
||||||
|
}).map(function (_ref1) {
|
||||||
|
var item = _ref1.item;
|
||||||
|
return item;
|
||||||
|
});
|
||||||
|
// Stryker restore all
|
||||||
|
}
|
||||||
|
module.exports = {
|
||||||
|
endsWith: endsWith,
|
||||||
|
startsWith: startsWith,
|
||||||
|
isContent: isContent,
|
||||||
|
isParagraphStart: isParagraphStart,
|
||||||
|
isParagraphEnd: isParagraphEnd,
|
||||||
|
isTagStart: isTagStart,
|
||||||
|
isTagEnd: isTagEnd,
|
||||||
|
isTextStart: isTextStart,
|
||||||
|
isTextEnd: isTextEnd,
|
||||||
|
isStarting: isStarting,
|
||||||
|
isEnding: isEnding,
|
||||||
|
isModule: isModule,
|
||||||
|
uniq: uniq,
|
||||||
|
getDuplicates: getDuplicates,
|
||||||
|
chunkBy: chunkBy,
|
||||||
|
last: last,
|
||||||
|
first: first,
|
||||||
|
xml2str: xml2str,
|
||||||
|
str2xml: str2xml,
|
||||||
|
getRightOrNull: getRightOrNull,
|
||||||
|
getRight: getRight,
|
||||||
|
getLeftOrNull: getLeftOrNull,
|
||||||
|
getLeft: getLeft,
|
||||||
|
pregMatchAll: pregMatchAll,
|
||||||
|
convertSpaces: convertSpaces,
|
||||||
|
charMapRegexes: charMapRegexes,
|
||||||
|
hasCorruptCharacters: hasCorruptCharacters,
|
||||||
|
removeCorruptCharacters: removeCorruptCharacters,
|
||||||
|
getDefaults: getDefaults,
|
||||||
|
wordToUtf8: wordToUtf8,
|
||||||
|
utf8ToWord: utf8ToWord,
|
||||||
|
concatArrays: concatArrays,
|
||||||
|
pushArray: pushArray,
|
||||||
|
invertMap: invertMap,
|
||||||
|
charMap: charMap,
|
||||||
|
getSingleAttribute: getSingleAttribute,
|
||||||
|
setSingleAttribute: setSingleAttribute,
|
||||||
|
isWhiteSpace: isWhiteSpace,
|
||||||
|
stableSort: stableSort
|
||||||
|
};
|
||||||
229
tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/js/docxtemplater.d.ts
generated
vendored
Normal file
@@ -0,0 +1,229 @@
|
|||||||
|
declare namespace DXT {
|
||||||
|
type integer = number;
|
||||||
|
|
||||||
|
interface SimplePart {
|
||||||
|
type: string;
|
||||||
|
value: string;
|
||||||
|
module?: string;
|
||||||
|
[x: string]: any;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface Part {
|
||||||
|
type: string;
|
||||||
|
value: string;
|
||||||
|
module: string;
|
||||||
|
raw: string;
|
||||||
|
offset: integer;
|
||||||
|
lIndex: integer;
|
||||||
|
num: integer;
|
||||||
|
inverted?: boolean;
|
||||||
|
endLindex?: integer;
|
||||||
|
expanded?: Part[];
|
||||||
|
subparsed?: Part[];
|
||||||
|
position?: string;
|
||||||
|
tag?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface ScopeManager {
|
||||||
|
scopeList: any[];
|
||||||
|
scopeLindex: integer[];
|
||||||
|
scopePath: string[];
|
||||||
|
scopePathItem: integer[];
|
||||||
|
scopePathLength: integer[];
|
||||||
|
resolved: any;
|
||||||
|
cachedParsers: Record<
|
||||||
|
string,
|
||||||
|
(scope: any, context: ParserContext) => any
|
||||||
|
>;
|
||||||
|
parser(tag: string): Parser;
|
||||||
|
getValue(value: string, { part: Part }): any;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface Rendered {
|
||||||
|
value: string;
|
||||||
|
errors: any[];
|
||||||
|
}
|
||||||
|
|
||||||
|
type Error = any;
|
||||||
|
type Compression = "STORE" | "DEFLATE";
|
||||||
|
|
||||||
|
interface ZipOptions {
|
||||||
|
/**
|
||||||
|
* the default file compression method to use. Available methods are `STORE` and `DEFLATE`. You can also provide your own compression method.
|
||||||
|
* @default "DEFLATE"
|
||||||
|
*/
|
||||||
|
compression?: Compression | undefined;
|
||||||
|
/**
|
||||||
|
* the options to use when compressing the file. With `STORE` (no compression), this parameter is ignored.
|
||||||
|
* With `DEFLATE`, you can give the compression level with `compressionOptions : {level:6}`
|
||||||
|
* (or any level between 1 (best speed) and 9 (best compression)).
|
||||||
|
*
|
||||||
|
* Note : if the entry is already compressed (coming from a compressed zip file),
|
||||||
|
* calling `generate()` with a different compression level won't update the entry.
|
||||||
|
* The reason is simple : PizZip doesn't know how compressed the content was and how to match the compression level with the implementation we use.
|
||||||
|
*/
|
||||||
|
compressionOptions?:
|
||||||
|
| {
|
||||||
|
level: 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9;
|
||||||
|
}
|
||||||
|
| null
|
||||||
|
| undefined;
|
||||||
|
/**
|
||||||
|
* The comment to use for the zip file.
|
||||||
|
*/
|
||||||
|
comment?: string | undefined;
|
||||||
|
/**
|
||||||
|
* The platform to use when generating the zip file. When using `DOS`, the attribute `dosPermissions` of each file is used.
|
||||||
|
* When using `UNIX`, the attribute `unixPermissions` of each file is used.
|
||||||
|
* If you set the platform value on nodejs, be sure to use `process.platform`.
|
||||||
|
* `fs.stats` returns a non executable mode for folders on windows,
|
||||||
|
* if you force the platform to `UNIX` the generated zip file will have a strange behavior on UNIX platforms.
|
||||||
|
* @default "DOS"
|
||||||
|
*/
|
||||||
|
platform?: "DOS" | "UNIX" | NodeJS.Platform | undefined;
|
||||||
|
/**
|
||||||
|
* The function to encode the file name / comment.
|
||||||
|
* By default, PizZip uses UTF-8 to encode the file names / comments. You can use this method to force an other encoding.
|
||||||
|
* Note : the encoding used is not stored in a zip file, not using UTF-8 may lead to encoding issues.
|
||||||
|
* The function takes a string and returns a bytes array (Uint8Array or Array).
|
||||||
|
*/
|
||||||
|
encodeFileName?(name: string): Buffer;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* The function to change the ordering of the files in the zip archive.
|
||||||
|
* The function takes the files array and returns the list of files in the order that you want them to be in the final zip file.
|
||||||
|
*/
|
||||||
|
fileOrder?(files: string[]): string[];
|
||||||
|
}
|
||||||
|
|
||||||
|
interface RenderOptions {
|
||||||
|
joinUncorrupt(parts: Part[], options: RenderOptions): Part[];
|
||||||
|
render(part: Part, options: RenderOptions): Rendered | null;
|
||||||
|
nullGetter?(part: Part, scopeManager: ScopeManager): any;
|
||||||
|
resolvedId: string;
|
||||||
|
index: number;
|
||||||
|
scopeManager: ScopeManager;
|
||||||
|
stripInvalidXMLChars: boolean;
|
||||||
|
linebreaks: boolean;
|
||||||
|
fileType: string;
|
||||||
|
fileTypeConfig: any;
|
||||||
|
filePath: string;
|
||||||
|
contentType: string;
|
||||||
|
parser: Parser;
|
||||||
|
cachedParsers: Record<
|
||||||
|
string,
|
||||||
|
(scope: any, context: ParserContext) => any
|
||||||
|
>;
|
||||||
|
compiled: Part[];
|
||||||
|
}
|
||||||
|
|
||||||
|
interface Module {
|
||||||
|
set?(options: any): void;
|
||||||
|
clone?(): Module;
|
||||||
|
matchers?(): [
|
||||||
|
string,
|
||||||
|
string,
|
||||||
|
{ [x: string]: any } | ((part: SimplePart) => { [x: string]: any }),
|
||||||
|
][];
|
||||||
|
render?(part: Part, options: RenderOptions): Rendered | null;
|
||||||
|
getTraits?(traitName: string, parsed: any): any;
|
||||||
|
getFileType?(opts: any): string | void;
|
||||||
|
nullGetter?(part: Part, scopeManager: ScopeManager): any;
|
||||||
|
optionsTransformer?(options: Options, doc: Docxtemplater): Options;
|
||||||
|
postrender?(parts: string[], options: any): string[];
|
||||||
|
errorsTransformer?(errors: Error[]): Error[];
|
||||||
|
getRenderedMap?(map: any): any;
|
||||||
|
preparse?(parsed: any, options: any): any;
|
||||||
|
parse?(placeHolderContent: string): SimplePart | null;
|
||||||
|
postparse?(postparsed: Part[], modules: Module[], options: any): Part[];
|
||||||
|
on?(event: string): void;
|
||||||
|
preResolve?(options: any): void;
|
||||||
|
resolve?(part: Part, options: any): null | Promise<any>;
|
||||||
|
preZip?(content: string, currentFile: string): null | string;
|
||||||
|
|
||||||
|
[x: string]: any;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface ParserContext {
|
||||||
|
meta: {
|
||||||
|
part: Part;
|
||||||
|
};
|
||||||
|
scopeList: any[];
|
||||||
|
scopePath: string[];
|
||||||
|
scopePathItem: integer[];
|
||||||
|
scopePathLength: integer[];
|
||||||
|
num: integer;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface Parser {
|
||||||
|
get(scope: any, context: ParserContext): any;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface Syntax {
|
||||||
|
allowUnopenedTag?: boolean;
|
||||||
|
allowUnclosedTag?: boolean;
|
||||||
|
allowUnbalancedLoops?: boolean;
|
||||||
|
changeDelimiterPrefix?: string | null;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface Options {
|
||||||
|
delimiters?: { start: string | null; end: string | null };
|
||||||
|
paragraphLoop?: boolean;
|
||||||
|
parser?(tag: string): Parser;
|
||||||
|
errorLogging?: boolean | string;
|
||||||
|
linebreaks?: boolean;
|
||||||
|
nullGetter?(part: Part, scopeManager: ScopeManager): any;
|
||||||
|
fileTypeConfig?: any;
|
||||||
|
syntax?: Syntax;
|
||||||
|
stripInvalidXMLChars?: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface ConstructorOptions extends Options {
|
||||||
|
modules?: Module[];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
declare class Docxtemplater<TZip = any> {
|
||||||
|
static default: typeof Docxtemplater;
|
||||||
|
/**
|
||||||
|
* Create Docxtemplater instance (and compile it on the fly)
|
||||||
|
*
|
||||||
|
* @param zip Serialized zip archive
|
||||||
|
* @param options `modules` and other options
|
||||||
|
*/
|
||||||
|
constructor(zip: TZip, options?: DXT.ConstructorOptions);
|
||||||
|
/**
|
||||||
|
* Create Docxtemplater instance, without options
|
||||||
|
*/
|
||||||
|
constructor();
|
||||||
|
|
||||||
|
setData(data: any): this;
|
||||||
|
resolveData(data: any): Promise<any>;
|
||||||
|
render(data?: any): this;
|
||||||
|
renderAsync(data?: any): Promise<any>;
|
||||||
|
getZip(): TZip;
|
||||||
|
|
||||||
|
loadZip(zip: TZip): this;
|
||||||
|
setOptions(options: DXT.Options): this;
|
||||||
|
attachModule(module: DXT.Module): this;
|
||||||
|
compile(): this;
|
||||||
|
getFullText(path?: string): string;
|
||||||
|
targets: string[]; // used to know which files are templated
|
||||||
|
replaceFirstSection?: boolean; // used for the subsection module
|
||||||
|
replaceLastSection?: boolean; // used for the subsection module
|
||||||
|
includeSections?: boolean; // used for the subsection module
|
||||||
|
keepStyles?: boolean; // used for the subtemplate module
|
||||||
|
modules: DXT.Module[];
|
||||||
|
|
||||||
|
toBuffer(options?: DXT.ZipOptions): Buffer;
|
||||||
|
toBlob(options?: DXT.ZipOptions): Blob;
|
||||||
|
toBase64(options?: DXT.ZipOptions): string;
|
||||||
|
toUint8Array(options?: DXT.ZipOptions): Uint8Array;
|
||||||
|
toArrayBuffer(options?: DXT.ZipOptions): ArrayBuffer;
|
||||||
|
}
|
||||||
|
|
||||||
|
declare namespace Docxtemplater {
|
||||||
|
export { DXT };
|
||||||
|
}
|
||||||
|
|
||||||
|
export = Docxtemplater;
|
||||||
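The `ZipOptions` comments in the declaration above document the output options. A small sketch of the documented `compression`/`compressionOptions` pair; `doc` is assumed to be an already-rendered instance as in the earlier sketches, and level 6 is just an example value:

```ts
// Hedged sketch of the documented ZipOptions: DEFLATE plus an explicit level.
// Per the comments above, compressionOptions is ignored with STORE.
import Docxtemplater from "docxtemplater";

declare const doc: Docxtemplater; // assumed rendered instance

const buffer = doc.toBuffer({
    compression: "DEFLATE",
    compressionOptions: { level: 6 },
});

// Browser-side equivalent producing a Blob for download
const blob = doc.toBlob({ compression: "DEFLATE" });
```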
826
tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/js/docxtemplater.js
generated
vendored
Normal file
@@ -0,0 +1,826 @@
|
|||||||
|
"use strict";
|
||||||
|
|
||||||
|
var _excluded = ["modules"];
|
||||||
|
function ownKeys(e, r) { var t = Object.keys(e); if (Object.getOwnPropertySymbols) { var o = Object.getOwnPropertySymbols(e); r && (o = o.filter(function (r) { return Object.getOwnPropertyDescriptor(e, r).enumerable; })), t.push.apply(t, o); } return t; }
|
||||||
|
function _objectSpread(e) { for (var r = 1; r < arguments.length; r++) { var t = null != arguments[r] ? arguments[r] : {}; r % 2 ? ownKeys(Object(t), !0).forEach(function (r) { _defineProperty(e, r, t[r]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(e, Object.getOwnPropertyDescriptors(t)) : ownKeys(Object(t)).forEach(function (r) { Object.defineProperty(e, r, Object.getOwnPropertyDescriptor(t, r)); }); } return e; }
|
||||||
|
function _defineProperty(e, r, t) { return (r = _toPropertyKey(r)) in e ? Object.defineProperty(e, r, { value: t, enumerable: !0, configurable: !0, writable: !0 }) : e[r] = t, e; }
|
||||||
|
function _slicedToArray(r, e) { return _arrayWithHoles(r) || _iterableToArrayLimit(r, e) || _unsupportedIterableToArray(r, e) || _nonIterableRest(); }
|
||||||
|
function _nonIterableRest() { throw new TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); }
|
||||||
|
function _unsupportedIterableToArray(r, a) { if (r) { if ("string" == typeof r) return _arrayLikeToArray(r, a); var t = {}.toString.call(r).slice(8, -1); return "Object" === t && r.constructor && (t = r.constructor.name), "Map" === t || "Set" === t ? Array.from(r) : "Arguments" === t || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(t) ? _arrayLikeToArray(r, a) : void 0; } }
|
||||||
|
function _arrayLikeToArray(r, a) { (null == a || a > r.length) && (a = r.length); for (var e = 0, n = Array(a); e < a; e++) n[e] = r[e]; return n; }
|
||||||
|
function _iterableToArrayLimit(r, l) { var t = null == r ? null : "undefined" != typeof Symbol && r[Symbol.iterator] || r["@@iterator"]; if (null != t) { var e, n, i, u, a = [], f = !0, o = !1; try { if (i = (t = t.call(r)).next, 0 === l) { if (Object(t) !== t) return; f = !1; } else for (; !(f = (e = i.call(t)).done) && (a.push(e.value), a.length !== l); f = !0); } catch (r) { o = !0, n = r; } finally { try { if (!f && null != t["return"] && (u = t["return"](), Object(u) !== u)) return; } finally { if (o) throw n; } } return a; } }
|
||||||
|
function _arrayWithHoles(r) { if (Array.isArray(r)) return r; }
|
||||||
|
function _typeof(o) { "@babel/helpers - typeof"; return _typeof = "function" == typeof Symbol && "symbol" == typeof Symbol.iterator ? function (o) { return typeof o; } : function (o) { return o && "function" == typeof Symbol && o.constructor === Symbol && o !== Symbol.prototype ? "symbol" : typeof o; }, _typeof(o); }
|
||||||
|
function _objectWithoutProperties(e, t) { if (null == e) return {}; var o, r, i = _objectWithoutPropertiesLoose(e, t); if (Object.getOwnPropertySymbols) { var n = Object.getOwnPropertySymbols(e); for (r = 0; r < n.length; r++) o = n[r], -1 === t.indexOf(o) && {}.propertyIsEnumerable.call(e, o) && (i[o] = e[o]); } return i; }
|
||||||
|
function _objectWithoutPropertiesLoose(r, e) { if (null == r) return {}; var t = {}; for (var n in r) if ({}.hasOwnProperty.call(r, n)) { if (-1 !== e.indexOf(n)) continue; t[n] = r[n]; } return t; }
|
||||||
|
function _classCallCheck(a, n) { if (!(a instanceof n)) throw new TypeError("Cannot call a class as a function"); }
|
||||||
|
function _defineProperties(e, r) { for (var t = 0; t < r.length; t++) { var o = r[t]; o.enumerable = o.enumerable || !1, o.configurable = !0, "value" in o && (o.writable = !0), Object.defineProperty(e, _toPropertyKey(o.key), o); } }
|
||||||
|
function _createClass(e, r, t) { return r && _defineProperties(e.prototype, r), t && _defineProperties(e, t), Object.defineProperty(e, "prototype", { writable: !1 }), e; }
|
||||||
|
function _toPropertyKey(t) { var i = _toPrimitive(t, "string"); return "symbol" == _typeof(i) ? i : i + ""; }
|
||||||
|
function _toPrimitive(t, r) { if ("object" != _typeof(t) || !t) return t; var e = t[Symbol.toPrimitive]; if (void 0 !== e) { var i = e.call(t, r || "default"); if ("object" != _typeof(i)) return i; throw new TypeError("@@toPrimitive must return a primitive value."); } return ("string" === r ? String : Number)(t); }
|
||||||
|
var DocUtils = require("./doc-utils.js");
|
||||||
|
var z = require("./minizod.js");
|
||||||
|
|
||||||
|
// Schema definitions for DXT.ConstructorOptions
|
||||||
|
var dxtSyntaxSchema = z.object({
|
||||||
|
allowUnopenedTag: z["boolean"]().optional(),
|
||||||
|
allowUnclosedTag: z["boolean"]().optional(),
|
||||||
|
allowUnbalancedLoops: z["boolean"]().optional(),
|
||||||
|
changeDelimiterPrefix: z.string().optional().nullable()
|
||||||
|
});
|
||||||
|
var dxtOptionsSchema = z.object({
|
||||||
|
delimiters: z.object({
|
||||||
|
start: z.string().nullable(),
|
||||||
|
end: z.string().nullable()
|
||||||
|
}).strict().optional(),
|
||||||
|
fileTypeConfig: z.object({}).optional(),
|
||||||
|
paragraphLoop: z["boolean"]().optional(),
|
||||||
|
parser: z["function"]().optional(),
|
||||||
|
errorLogging: z.union([z["boolean"](), z.string()]).optional(),
|
||||||
|
linebreaks: z["boolean"]().optional(),
|
||||||
|
nullGetter: z["function"]().optional(),
|
||||||
|
syntax: dxtSyntaxSchema.optional(),
|
||||||
|
stripInvalidXMLChars: z["boolean"]().optional()
|
||||||
|
}).strict();
|
||||||
|
var _require = require("./get-relation-types.js"),
|
||||||
|
getRelsTypes = _require.getRelsTypes;
|
||||||
|
var _require2 = require("./get-content-types.js"),
|
||||||
|
collectContentTypes = _require2.collectContentTypes,
|
||||||
|
getContentTypes = _require2.getContentTypes;
|
||||||
|
var moduleWrapper = require("./module-wrapper.js");
|
||||||
|
var traits = require("./traits.js");
|
||||||
|
var commonModule = require("./modules/common.js");
|
||||||
|
var createScope = require("./scope-manager.js");
|
||||||
|
var Lexer = require("./lexer.js");
|
||||||
|
var _require3 = require("./get-tags.js"),
|
||||||
|
_getTags = _require3.getTags;
|
||||||
|
var logErrors = require("./error-logger.js");
|
||||||
|
var _require4 = require("./errors.js"),
|
||||||
|
throwMultiError = _require4.throwMultiError,
|
||||||
|
throwResolveBeforeCompile = _require4.throwResolveBeforeCompile,
|
||||||
|
throwRenderInvalidTemplate = _require4.throwRenderInvalidTemplate,
|
||||||
|
throwRenderTwice = _require4.throwRenderTwice,
|
||||||
|
XTInternalError = _require4.XTInternalError,
|
||||||
|
XTTemplateError = _require4.XTTemplateError,
|
||||||
|
throwFileTypeNotIdentified = _require4.throwFileTypeNotIdentified,
|
||||||
|
throwFileTypeNotHandled = _require4.throwFileTypeNotHandled,
|
||||||
|
throwApiVersionError = _require4.throwApiVersionError;
|
||||||
|
DocUtils.getRelsTypes = getRelsTypes;
|
||||||
|
DocUtils.traits = traits;
|
||||||
|
DocUtils.moduleWrapper = moduleWrapper;
|
||||||
|
DocUtils.collectContentTypes = collectContentTypes;
|
||||||
|
DocUtils.getContentTypes = getContentTypes;
|
||||||
|
var getDefaults = DocUtils.getDefaults,
|
||||||
|
str2xml = DocUtils.str2xml,
|
||||||
|
xml2str = DocUtils.xml2str,
|
||||||
|
concatArrays = DocUtils.concatArrays,
|
||||||
|
uniq = DocUtils.uniq,
|
||||||
|
getDuplicates = DocUtils.getDuplicates,
|
||||||
|
stableSort = DocUtils.stableSort,
|
||||||
|
pushArray = DocUtils.pushArray,
|
||||||
|
utf8ToWord = DocUtils.utf8ToWord,
|
||||||
|
invertMap = DocUtils.invertMap;
|
||||||
|
var ctXML = "[Content_Types].xml";
|
||||||
|
var relsFile = "_rels/.rels";
|
||||||
|
var currentModuleApiVersion = [3, 47, 2];
|
||||||
|
function throwIfDuplicateModules(modules) {
|
||||||
|
var duplicates = getDuplicates(modules.map(function (_ref) {
|
||||||
|
var name = _ref.name;
|
||||||
|
return name;
|
||||||
|
}));
|
||||||
|
if (duplicates.length > 0) {
|
||||||
|
throw new XTInternalError("Detected duplicate module \"".concat(duplicates[0], "\""));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
function addXmlFileNamesFromXmlContentType(doc) {
|
||||||
|
for (var _i2 = 0, _doc$modules2 = doc.modules; _i2 < _doc$modules2.length; _i2++) {
|
||||||
|
var _module = _doc$modules2[_i2];
|
||||||
|
for (var _i4 = 0, _ref3 = _module.xmlContentTypes || []; _i4 < _ref3.length; _i4++) {
|
||||||
|
var contentType = _ref3[_i4];
|
||||||
|
var candidates = doc.invertedContentTypes[contentType] || [];
|
||||||
|
for (var _i6 = 0; _i6 < candidates.length; _i6++) {
|
||||||
|
var candidate = candidates[_i6];
|
||||||
|
if (doc.zip.files[candidate]) {
|
||||||
|
doc.options.xmlFileNames.push(candidate);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
function reorderModules(modules) {
|
||||||
|
/**
|
||||||
|
* Modules will be sorted according to priority.
|
||||||
|
*
|
||||||
|
* Input example:
|
||||||
|
* [
|
||||||
|
* { priority: 1, name: "FooMod" },
|
||||||
|
* { priority: -1, name: "XMod" },
|
||||||
|
* { priority: 4, name: "OtherMod" }
|
||||||
|
* ]
|
||||||
|
*
|
||||||
|
* Output example (sorted by priority in descending order):
|
||||||
|
* [
|
||||||
|
* { priority: 4, name: "OtherMod" },
|
||||||
|
* { priority: 1, name: "FooMod" },
|
||||||
|
* { priority: -1, name: "XMod" }
|
||||||
|
* ]
|
||||||
|
* Tested in #test-reorder-modules
|
||||||
|
*/
|
||||||
|
return stableSort(modules, function (m1, m2) {
|
||||||
|
return (m2.priority || 0) - (m1.priority || 0);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
function zipFileOrder(files) {
|
||||||
|
var allFiles = [];
|
||||||
|
for (var name in files) {
|
||||||
|
allFiles.push(name);
|
||||||
|
}
|
||||||
|
/*
|
||||||
|
* The first files that need to be put in the zip file are :
|
||||||
|
* [Content_Types].xml and _rels/.rels
|
||||||
|
*/
|
||||||
|
var resultFiles = [ctXML, relsFile];
|
||||||
|
|
||||||
|
/*
|
||||||
|
* The next files that should be in the zip file are :
|
||||||
|
*
|
||||||
|
* - word/* (ie word/document.xml, word/header1.xml, ...)
|
||||||
|
* - xl/* (ie xl/worksheets/sheet1.xml)
|
||||||
|
* - ppt/* (ie ppt/slides/slide1.xml)
|
||||||
|
*/
|
||||||
|
var prefixes = ["word/", "xl/", "ppt/"];
|
||||||
|
for (var _i8 = 0; _i8 < allFiles.length; _i8++) {
|
||||||
|
var _name = allFiles[_i8];
|
||||||
|
for (var _i0 = 0; _i0 < prefixes.length; _i0++) {
|
||||||
|
var prefix = prefixes[_i0];
|
||||||
|
if (_name.indexOf("".concat(prefix)) === 0) {
|
||||||
|
resultFiles.push(_name);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
/*
|
||||||
|
* Push the rest of files, such as docProps/core.xml and docProps/app.xml
|
||||||
|
*/
|
||||||
|
for (var _i10 = 0; _i10 < allFiles.length; _i10++) {
|
||||||
|
var _name2 = allFiles[_i10];
|
||||||
|
if (resultFiles.indexOf(_name2) === -1) {
|
||||||
|
resultFiles.push(_name2);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return resultFiles;
|
||||||
|
}
function deprecatedMessage(obj, message) {
  if (obj.hideDeprecations === true) {
    return;
  }
  // eslint-disable-next-line no-console
  console.warn(message);
}

function deprecatedMethod(obj, method) {
  if (obj.hideDeprecations === true) {
    return;
  }
  return deprecatedMessage(obj, "Deprecated method \".".concat(method, "\", view upgrade guide : https://docxtemplater.com/docs/api/#upgrade-guide, stack : ").concat(new Error().stack));
}

function dropUnsupportedFileTypesModules(doc) {
  doc.modules = doc.modules.filter(function (module) {
    if (!module.supportedFileTypes) {
      return true;
    }
    if (!Array.isArray(module.supportedFileTypes)) {
      throw new Error("The supportedFileTypes field of the module must be an array");
    }
    var isSupportedModule = module.supportedFileTypes.includes(doc.fileType);
    if (!isSupportedModule) {
      module.on("detached");
    }
    return isSupportedModule;
  });
}
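// Illustration (editor's note, not part of the library; the module name below
// is made up for the example): a module declared as
//   { name: "SlidesOnlyModule", supportedFileTypes: ["pptx"], on: function () {} }
// is kept when doc.fileType === "pptx", but receives the "detached" event and
// is filtered out when the document being templated is a docx.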
function verifyErrors(doc) {
  var compiled = doc.compiled;
  doc.errors = concatArrays(Object.keys(compiled).map(function (name) {
    return compiled[name].allErrors;
  }));
  if (doc.errors.length !== 0) {
    if (doc.options.errorLogging) {
      logErrors(doc.errors, doc.options.errorLogging);
    }
    throwMultiError(doc.errors);
  }
}

function isBuffer(v) {
  return typeof Buffer !== "undefined" && typeof Buffer.isBuffer === "function" && Buffer.isBuffer(v);
}
|
||||||
|
var Docxtemplater = /*#__PURE__*/function () {
|
||||||
|
function Docxtemplater(zip) {
|
||||||
|
var _ref4 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},
|
||||||
|
_ref4$modules = _ref4.modules,
|
||||||
|
modules = _ref4$modules === void 0 ? [] : _ref4$modules,
|
||||||
|
options = _objectWithoutProperties(_ref4, _excluded);
|
||||||
|
_classCallCheck(this, Docxtemplater);
|
||||||
|
this.targets = [];
|
||||||
|
this.rendered = false;
|
||||||
|
this.scopeManagers = {};
|
||||||
|
this.compiled = {};
|
||||||
|
this.modules = [commonModule()];
|
||||||
|
this.xmlDocuments = {};
|
||||||
|
if (arguments.length === 0) {
|
||||||
|
deprecatedMessage(this, "Deprecated docxtemplater constructor with no arguments, view upgrade guide : https://docxtemplater.com/docs/api/#upgrade-guide, stack : ".concat(new Error().stack));
|
||||||
|
this.hideDeprecations = true;
|
||||||
|
this.setOptions(options);
|
||||||
|
} else {
|
||||||
|
this.hideDeprecations = true;
|
||||||
|
this.setOptions(options);
|
||||||
|
if (isBuffer(zip)) {
|
||||||
|
throw new Error("You passed a Buffer to the Docxtemplater constructor. The first argument of docxtemplater's constructor must be a valid zip file (jszip v2 or pizzip v3)");
|
||||||
|
}
|
||||||
|
if (!zip || !zip.files || typeof zip.file !== "function") {
|
||||||
|
throw new Error("The first argument of docxtemplater's constructor must be a valid zip file (jszip v2 or pizzip v3)");
|
||||||
|
}
|
||||||
|
if (!Array.isArray(modules)) {
|
||||||
|
throw new Error("The modules argument of docxtemplater's constructor must be an array");
|
||||||
|
}
|
||||||
|
for (var _i12 = 0; _i12 < modules.length; _i12++) {
|
||||||
|
var _module2 = modules[_i12];
|
||||||
|
this.attachModule(_module2);
|
||||||
|
}
|
||||||
|
this.loadZip(zip);
|
||||||
|
this.compile();
|
||||||
|
this.v4Constructor = true;
|
||||||
|
}
|
||||||
|
this.hideDeprecations = false;
|
||||||
|
}
|
||||||
|
return _createClass(Docxtemplater, [{
|
||||||
|
key: "verifyApiVersion",
|
||||||
|
value: function verifyApiVersion(neededVersion) {
|
||||||
|
neededVersion = neededVersion.split(".").map(function (i) {
|
||||||
|
return parseInt(i, 10);
|
||||||
|
});
|
||||||
|
if (neededVersion.length !== 3) {
|
||||||
|
throwApiVersionError("neededVersion is not a valid version", {
|
||||||
|
neededVersion: neededVersion,
|
||||||
|
explanation: "the neededVersion must be an array of length 3"
|
||||||
|
});
|
||||||
|
}
|
||||||
|
if (neededVersion[0] !== currentModuleApiVersion[0]) {
|
||||||
|
throwApiVersionError("The major api version do not match, you probably have to update docxtemplater with npm install --save docxtemplater", {
|
||||||
|
neededVersion: neededVersion,
|
||||||
|
currentModuleApiVersion: currentModuleApiVersion,
|
||||||
|
explanation: "moduleAPIVersionMismatch : needed=".concat(neededVersion.join("."), ", current=").concat(currentModuleApiVersion.join("."))
|
||||||
|
});
|
||||||
|
}
|
||||||
|
if (neededVersion[1] > currentModuleApiVersion[1]) {
|
||||||
|
throwApiVersionError("The minor api version is not uptodate, you probably have to update docxtemplater with npm install --save docxtemplater", {
|
||||||
|
neededVersion: neededVersion,
|
||||||
|
currentModuleApiVersion: currentModuleApiVersion,
|
||||||
|
explanation: "moduleAPIVersionMismatch : needed=".concat(neededVersion.join("."), ", current=").concat(currentModuleApiVersion.join("."))
|
||||||
|
});
|
||||||
|
}
|
||||||
|
if (neededVersion[1] === currentModuleApiVersion[1] && neededVersion[2] > currentModuleApiVersion[2]) {
|
||||||
|
throwApiVersionError("The patch api version is not uptodate, you probably have to update docxtemplater with npm install --save docxtemplater", {
|
||||||
|
neededVersion: neededVersion,
|
||||||
|
currentModuleApiVersion: currentModuleApiVersion,
|
||||||
|
explanation: "moduleAPIVersionMismatch : needed=".concat(neededVersion.join("."), ", current=").concat(currentModuleApiVersion.join("."))
|
||||||
|
});
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
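// Illustration (editor's note; the version numbers are hypothetical): with
// currentModuleApiVersion equal to [3, 46, 0], verifyApiVersion("3.40.0")
// returns true, verifyApiVersion("4.0.0") throws an APIVersionError (major
// mismatch), and verifyApiVersion("3.50.0") throws because the requested
// minor version is newer than the one this build provides.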
|
||||||
|
}, {
|
||||||
|
key: "setModules",
|
||||||
|
value: function setModules(obj) {
|
||||||
|
for (var _i14 = 0, _this$modules2 = this.modules; _i14 < _this$modules2.length; _i14++) {
|
||||||
|
var _module3 = _this$modules2[_i14];
|
||||||
|
_module3.set(obj);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}, {
|
||||||
|
key: "sendEvent",
|
||||||
|
value: function sendEvent(eventName) {
|
||||||
|
for (var _i16 = 0, _this$modules4 = this.modules; _i16 < _this$modules4.length; _i16++) {
|
||||||
|
var _module4 = _this$modules4[_i16];
|
||||||
|
_module4.on(eventName);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}, {
|
||||||
|
key: "attachModule",
|
||||||
|
value: function attachModule(module) {
|
||||||
|
if (this.v4Constructor) {
|
||||||
|
throw new XTInternalError("attachModule() should not be called manually when using the v4 constructor");
|
||||||
|
}
|
||||||
|
deprecatedMethod(this, "attachModule");
|
||||||
|
var moduleType = _typeof(module);
|
||||||
|
if (moduleType === "function") {
|
||||||
|
throw new XTInternalError("Cannot attach a class/function as a module. Most probably you forgot to instantiate the module by using `new` on the module.");
|
||||||
|
}
|
||||||
|
if (!module || moduleType !== "object") {
|
||||||
|
throw new XTInternalError("Cannot attachModule with a falsy value");
|
||||||
|
}
|
||||||
|
if (module.requiredAPIVersion) {
|
||||||
|
this.verifyApiVersion(module.requiredAPIVersion);
|
||||||
|
}
|
||||||
|
if (module.attached === true) {
|
||||||
|
if (typeof module.clone === "function") {
|
||||||
|
module = module.clone();
|
||||||
|
} else {
|
||||||
|
throw new Error("Cannot attach a module that was already attached : \"".concat(module.name, "\". The most likely cause is that you are instantiating the module at the root level, and using it for multiple instances of Docxtemplater"));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
module.attached = true;
|
||||||
|
var wrappedModule = moduleWrapper(module);
|
||||||
|
this.modules.push(wrappedModule);
|
||||||
|
wrappedModule.on("attached");
|
||||||
|
if (this.fileType) {
|
||||||
|
dropUnsupportedFileTypesModules(this);
|
||||||
|
}
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
}, {
|
||||||
|
key: "findModule",
|
||||||
|
value: function findModule(name) {
|
||||||
|
for (var _i18 = 0, _this$modules6 = this.modules; _i18 < _this$modules6.length; _i18++) {
|
||||||
|
var _module5 = _this$modules6[_i18];
|
||||||
|
if (_module5.name === name) {
|
||||||
|
return _module5;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}, {
|
||||||
|
key: "setOptions",
|
||||||
|
value: function setOptions(options) {
|
||||||
|
var _this$delimiters, _this$delimiters2;
|
||||||
|
if (this.v4Constructor) {
|
||||||
|
throw new Error("setOptions() should not be called manually when using the v4 constructor");
|
||||||
|
}
|
||||||
|
if (!options) {
|
||||||
|
throw new Error("setOptions should be called with an object as first parameter");
|
||||||
|
}
|
||||||
|
var result = dxtOptionsSchema.validate(options);
|
||||||
|
if (result.success === false) {
|
||||||
|
throw new Error(result.error);
|
||||||
|
}
|
||||||
|
deprecatedMethod(this, "setOptions");
|
||||||
|
this.options = {};
|
||||||
|
var defaults = getDefaults();
|
||||||
|
for (var key in defaults) {
|
||||||
|
var defaultValue = defaults[key];
|
||||||
|
this.options[key] = options[key] != null ? options[key] : this[key] || defaultValue;
|
||||||
|
this[key] = this.options[key];
|
||||||
|
}
|
||||||
|
(_this$delimiters = this.delimiters).start && (_this$delimiters.start = utf8ToWord(this.delimiters.start));
|
||||||
|
(_this$delimiters2 = this.delimiters).end && (_this$delimiters2.end = utf8ToWord(this.delimiters.end));
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
}, {
|
||||||
|
key: "loadZip",
|
||||||
|
value: function loadZip(zip) {
|
||||||
|
if (this.v4Constructor) {
|
||||||
|
throw new Error("loadZip() should not be called manually when using the v4 constructor");
|
||||||
|
}
|
||||||
|
deprecatedMethod(this, "loadZip");
|
||||||
|
if (zip.loadAsync) {
|
||||||
|
throw new XTInternalError("Docxtemplater doesn't handle JSZip version >=3, please use pizzip");
|
||||||
|
}
|
||||||
|
this.zip = zip;
|
||||||
|
this.updateFileTypeConfig();
|
||||||
|
this.modules = concatArrays([this.fileTypeConfig.baseModules.map(function (moduleFunction) {
|
||||||
|
return moduleFunction();
|
||||||
|
}), this.modules]);
|
||||||
|
for (var _i20 = 0, _this$modules8 = this.modules; _i20 < _this$modules8.length; _i20++) {
|
||||||
|
var _module6 = _this$modules8[_i20];
|
||||||
|
_module6.zip = this.zip;
|
||||||
|
_module6.docxtemplater = this;
|
||||||
|
_module6.fileTypeConfig = this.fileTypeConfig;
|
||||||
|
_module6.fileType = this.fileType;
|
||||||
|
_module6.xtOptions = this.options;
|
||||||
|
_module6.modules = this.modules;
|
||||||
|
}
|
||||||
|
dropUnsupportedFileTypesModules(this);
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
}, {
|
||||||
|
key: "precompileFile",
|
||||||
|
value: function precompileFile(fileName) {
|
||||||
|
var currentFile = this.createTemplateClass(fileName);
|
||||||
|
currentFile.preparse();
|
||||||
|
this.compiled[fileName] = currentFile;
|
||||||
|
}
|
||||||
|
}, {
|
||||||
|
key: "compileFile",
|
||||||
|
value: function compileFile(fileName) {
|
||||||
|
this.compiled[fileName].parse();
|
||||||
|
}
|
||||||
|
}, {
|
||||||
|
key: "getScopeManager",
|
||||||
|
value: function getScopeManager(to, currentFile, tags) {
|
||||||
|
var _this$scopeManagers;
|
||||||
|
(_this$scopeManagers = this.scopeManagers)[to] || (_this$scopeManagers[to] = createScope({
|
||||||
|
tags: tags,
|
||||||
|
parser: this.parser,
|
||||||
|
cachedParsers: currentFile.cachedParsers
|
||||||
|
}));
|
||||||
|
return this.scopeManagers[to];
|
||||||
|
}
|
||||||
|
}, {
|
||||||
|
key: "resolveData",
|
||||||
|
value: function resolveData(data) {
|
||||||
|
var _this = this;
|
||||||
|
deprecatedMethod(this, "resolveData");
|
||||||
|
var errors = [];
|
||||||
|
if (!Object.keys(this.compiled).length) {
|
||||||
|
throwResolveBeforeCompile();
|
||||||
|
}
|
||||||
|
return Promise.resolve(data).then(function (data) {
|
||||||
|
_this.data = data;
|
||||||
|
_this.setModules({
|
||||||
|
data: _this.data,
|
||||||
|
Lexer: Lexer
|
||||||
|
});
|
||||||
|
_this.mapper = _this.modules.reduce(function (value, module) {
|
||||||
|
return module.getRenderedMap(value);
|
||||||
|
}, {});
|
||||||
|
return Promise.all(Object.keys(_this.mapper).map(function (to) {
|
||||||
|
var _this$mapper$to = _this.mapper[to],
|
||||||
|
from = _this$mapper$to.from,
|
||||||
|
data = _this$mapper$to.data;
|
||||||
|
return Promise.resolve(data).then(function (data) {
|
||||||
|
var currentFile = _this.compiled[from];
|
||||||
|
currentFile.filePath = to;
|
||||||
|
currentFile.scopeManager = _this.getScopeManager(to, currentFile, data);
|
||||||
|
return currentFile.resolveTags(data).then(function (result) {
|
||||||
|
currentFile.scopeManager.finishedResolving = true;
|
||||||
|
return result;
|
||||||
|
}, function (errs) {
|
||||||
|
pushArray(errors, errs);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
})).then(function (resolved) {
|
||||||
|
if (errors.length !== 0) {
|
||||||
|
if (_this.options.errorLogging) {
|
||||||
|
logErrors(errors, _this.options.errorLogging);
|
||||||
|
}
|
||||||
|
throwMultiError(errors);
|
||||||
|
}
|
||||||
|
return concatArrays(resolved);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}, {
|
||||||
|
key: "compile",
|
||||||
|
value: function compile() {
|
||||||
|
deprecatedMethod(this, "compile");
|
||||||
|
this.updateFileTypeConfig();
|
||||||
|
throwIfDuplicateModules(this.modules);
|
||||||
|
this.modules = reorderModules(this.modules);
|
||||||
|
if (Object.keys(this.compiled).length) {
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
var options = this.options;
|
||||||
|
for (var _i22 = 0, _this$modules0 = this.modules; _i22 < _this$modules0.length; _i22++) {
|
||||||
|
var _module7 = _this$modules0[_i22];
|
||||||
|
options = _module7.optionsTransformer(options, this);
|
||||||
|
}
|
||||||
|
this.options = options;
|
||||||
|
this.options.xmlFileNames = uniq(this.options.xmlFileNames);
|
||||||
|
for (var _i24 = 0, _this$options$xmlFile2 = this.options.xmlFileNames; _i24 < _this$options$xmlFile2.length; _i24++) {
|
||||||
|
var fileName = _this$options$xmlFile2[_i24];
|
||||||
|
var content = this.zip.files[fileName].asText();
|
||||||
|
this.xmlDocuments[fileName] = str2xml(content);
|
||||||
|
}
|
||||||
|
this.setModules({
|
||||||
|
zip: this.zip,
|
||||||
|
xmlDocuments: this.xmlDocuments
|
||||||
|
});
|
||||||
|
for (var _i26 = 0, _this$modules10 = this.modules; _i26 < _this$modules10.length; _i26++) {
|
||||||
|
var _module8 = _this$modules10[_i26];
|
||||||
|
_module8.xmlDocuments = this.xmlDocuments;
|
||||||
|
}
|
||||||
|
this.getTemplatedFiles();
|
||||||
|
/*
|
||||||
|
* Loop inside all templatedFiles (ie xml files with content).
|
||||||
|
* Sometimes they don't exist (footer.xml for example)
|
||||||
|
*/
|
||||||
|
this.sendEvent("before-preparse");
|
||||||
|
for (var _i28 = 0, _this$templatedFiles2 = this.templatedFiles; _i28 < _this$templatedFiles2.length; _i28++) {
|
||||||
|
var _fileName = _this$templatedFiles2[_i28];
|
||||||
|
if (this.zip.files[_fileName] != null) {
|
||||||
|
this.precompileFile(_fileName);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
this.sendEvent("after-preparse");
|
||||||
|
for (var _i30 = 0, _this$templatedFiles4 = this.templatedFiles; _i30 < _this$templatedFiles4.length; _i30++) {
|
||||||
|
var _fileName2 = _this$templatedFiles4[_i30];
|
||||||
|
if (this.zip.files[_fileName2] != null) {
|
||||||
|
this.compiled[_fileName2].parse({
|
||||||
|
noPostParse: true
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
this.sendEvent("after-parse");
|
||||||
|
for (var _i32 = 0, _this$templatedFiles6 = this.templatedFiles; _i32 < _this$templatedFiles6.length; _i32++) {
|
||||||
|
var _fileName3 = _this$templatedFiles6[_i32];
|
||||||
|
if (this.zip.files[_fileName3] != null) {
|
||||||
|
this.compiled[_fileName3].postparse();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
this.sendEvent("after-postparse");
|
||||||
|
this.setModules({
|
||||||
|
compiled: this.compiled
|
||||||
|
});
|
||||||
|
verifyErrors(this);
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
}, {
|
||||||
|
key: "updateFileTypeConfig",
|
||||||
|
value: function updateFileTypeConfig() {
|
||||||
|
this.relsTypes = getRelsTypes(this.zip);
|
||||||
|
var _getContentTypes = getContentTypes(this.zip),
|
||||||
|
overrides = _getContentTypes.overrides,
|
||||||
|
defaults = _getContentTypes.defaults,
|
||||||
|
contentTypes = _getContentTypes.contentTypes,
|
||||||
|
contentTypeXml = _getContentTypes.contentTypeXml;
|
||||||
|
if (contentTypeXml) {
|
||||||
|
this.filesContentTypes = collectContentTypes(overrides, defaults, this.zip);
|
||||||
|
this.invertedContentTypes = invertMap(this.filesContentTypes);
|
||||||
|
this.setModules({
|
||||||
|
contentTypes: this.contentTypes,
|
||||||
|
invertedContentTypes: this.invertedContentTypes
|
||||||
|
});
|
||||||
|
}
|
||||||
|
var fileType;
|
||||||
|
if (this.zip.files.mimetype) {
|
||||||
|
fileType = "odt";
|
||||||
|
}
|
||||||
|
for (var _i34 = 0, _this$modules12 = this.modules; _i34 < _this$modules12.length; _i34++) {
|
||||||
|
var _module9 = _this$modules12[_i34];
|
||||||
|
fileType = _module9.getFileType({
|
||||||
|
zip: this.zip,
|
||||||
|
contentTypes: contentTypes,
|
||||||
|
contentTypeXml: contentTypeXml,
|
||||||
|
overrides: overrides,
|
||||||
|
defaults: defaults,
|
||||||
|
doc: this
|
||||||
|
}) || fileType;
|
||||||
|
}
|
||||||
|
this.fileType = fileType;
|
||||||
|
if (fileType === "odt") {
|
||||||
|
throwFileTypeNotHandled(fileType);
|
||||||
|
}
|
||||||
|
if (!fileType) {
|
||||||
|
throwFileTypeNotIdentified(this.zip);
|
||||||
|
}
|
||||||
|
addXmlFileNamesFromXmlContentType(this);
|
||||||
|
dropUnsupportedFileTypesModules(this);
|
||||||
|
this.fileTypeConfig = this.options.fileTypeConfig || this.fileTypeConfig;
|
||||||
|
if (!this.fileTypeConfig) {
|
||||||
|
if (Docxtemplater.FileTypeConfig[this.fileType]) {
|
||||||
|
this.fileTypeConfig = Docxtemplater.FileTypeConfig[this.fileType]();
|
||||||
|
} else {
|
||||||
|
/*
|
||||||
|
* Error case handled since v3.60.2
|
||||||
|
* Throw specific error when trying to template xlsx file without xlsxmodule
|
||||||
|
*/
|
||||||
|
var message = "Filetype \"".concat(this.fileType, "\" is not supported");
|
||||||
|
var id = "filetype_not_supported";
|
||||||
|
if (this.fileType === "xlsx") {
|
||||||
|
message = "Filetype \"".concat(this.fileType, "\" is supported only with the paid XlsxModule");
|
||||||
|
id = "xlsx_filetype_needs_xlsx_module";
|
||||||
|
}
|
||||||
|
var err = new XTTemplateError(message);
|
||||||
|
err.properties = {
|
||||||
|
id: id,
|
||||||
|
explanation: message
|
||||||
|
};
|
||||||
|
throw err;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
}, {
|
||||||
|
key: "renderAsync",
|
||||||
|
value: function renderAsync(data) {
|
||||||
|
var _this2 = this;
|
||||||
|
this.hideDeprecations = true;
|
||||||
|
var promise = this.resolveData(data);
|
||||||
|
this.hideDeprecations = false;
|
||||||
|
return promise.then(function () {
|
||||||
|
return _this2.render();
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}, {
|
||||||
|
key: "render",
|
||||||
|
value: function render(data) {
|
||||||
|
if (this.rendered) {
|
||||||
|
throwRenderTwice();
|
||||||
|
}
|
||||||
|
this.rendered = true;
|
||||||
|
if (Object.keys(this.compiled).length === 0) {
|
||||||
|
this.compile();
|
||||||
|
}
|
||||||
|
if (this.errors.length > 0) {
|
||||||
|
throwRenderInvalidTemplate();
|
||||||
|
}
|
||||||
|
if (arguments.length > 0) {
|
||||||
|
this.data = data;
|
||||||
|
}
|
||||||
|
this.setModules({
|
||||||
|
data: this.data,
|
||||||
|
Lexer: Lexer
|
||||||
|
});
|
||||||
|
this.mapper || (this.mapper = this.modules.reduce(function (value, module) {
|
||||||
|
return module.getRenderedMap(value);
|
||||||
|
}, {}));
|
||||||
|
var output = [];
|
||||||
|
for (var to in this.mapper) {
|
||||||
|
var _this$mapper$to2 = this.mapper[to],
|
||||||
|
from = _this$mapper$to2.from,
|
||||||
|
_data = _this$mapper$to2.data;
|
||||||
|
var currentFile = this.compiled[from];
|
||||||
|
currentFile.scopeManager = this.getScopeManager(to, currentFile, _data);
|
||||||
|
currentFile.render(to);
|
||||||
|
output.push([to, currentFile.content, currentFile]);
|
||||||
|
delete currentFile.content;
|
||||||
|
}
|
||||||
|
for (var _i36 = 0; _i36 < output.length; _i36++) {
|
||||||
|
var outputPart = output[_i36];
|
||||||
|
var _outputPart = _slicedToArray(outputPart, 3),
|
||||||
|
content = _outputPart[1],
|
||||||
|
_currentFile = _outputPart[2];
|
||||||
|
for (var _i38 = 0, _this$modules14 = this.modules; _i38 < _this$modules14.length; _i38++) {
|
||||||
|
var _module0 = _this$modules14[_i38];
|
||||||
|
if (_module0.preZip) {
|
||||||
|
var result = _module0.preZip(content, _currentFile);
|
||||||
|
if (typeof result === "string") {
|
||||||
|
outputPart[1] = result;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for (var _i40 = 0; _i40 < output.length; _i40++) {
|
||||||
|
var _output$_i = _slicedToArray(output[_i40], 2),
|
||||||
|
_to = _output$_i[0],
|
||||||
|
_content = _output$_i[1];
|
||||||
|
this.zip.file(_to, _content, {
|
||||||
|
createFolders: true
|
||||||
|
});
|
||||||
|
}
|
||||||
|
verifyErrors(this);
|
||||||
|
this.sendEvent("syncing-zip");
|
||||||
|
this.syncZip();
|
||||||
|
// The synced-zip event is used in the subtemplate module for example
|
||||||
|
this.sendEvent("synced-zip");
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
}, {
|
||||||
|
key: "syncZip",
|
||||||
|
value: function syncZip() {
|
||||||
|
for (var fileName in this.xmlDocuments) {
|
||||||
|
this.zip.remove(fileName);
|
||||||
|
var content = xml2str(this.xmlDocuments[fileName]);
|
||||||
|
this.zip.file(fileName, content, {
|
||||||
|
createFolders: true
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}, {
|
||||||
|
key: "setData",
|
||||||
|
value: function setData(data) {
|
||||||
|
deprecatedMethod(this, "setData");
|
||||||
|
this.data = data;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
}, {
|
||||||
|
key: "getZip",
|
||||||
|
value: function getZip() {
|
||||||
|
return this.zip;
|
||||||
|
}
|
||||||
|
}, {
|
||||||
|
key: "createTemplateClass",
|
||||||
|
value: function createTemplateClass(path) {
|
||||||
|
var content = this.zip.files[path].asText();
|
||||||
|
return this.createTemplateClassFromContent(content, path);
|
||||||
|
}
|
||||||
|
}, {
|
||||||
|
key: "createTemplateClassFromContent",
|
||||||
|
value: function createTemplateClassFromContent(content, filePath) {
|
||||||
|
var xmltOptions = {
|
||||||
|
filePath: filePath,
|
||||||
|
contentType: this.filesContentTypes[filePath],
|
||||||
|
relsType: this.relsTypes[filePath]
|
||||||
|
};
|
||||||
|
var defaults = getDefaults();
|
||||||
|
var defaultKeys = pushArray(Object.keys(defaults), ["filesContentTypes", "fileTypeConfig", "fileType", "modules"]);
|
||||||
|
for (var _i42 = 0; _i42 < defaultKeys.length; _i42++) {
|
||||||
|
var key = defaultKeys[_i42];
|
||||||
|
xmltOptions[key] = this[key];
|
||||||
|
}
|
||||||
|
return new Docxtemplater.XmlTemplater(content, xmltOptions);
|
||||||
|
}
|
||||||
|
}, {
|
||||||
|
key: "getFullText",
|
||||||
|
value: function getFullText(path) {
|
||||||
|
return this.createTemplateClass(path || this.fileTypeConfig.textPath(this)).getFullText();
|
||||||
|
}
|
||||||
|
}, {
|
||||||
|
key: "getTemplatedFiles",
|
||||||
|
value: function getTemplatedFiles() {
|
||||||
|
this.templatedFiles = this.fileTypeConfig.getTemplatedFiles(this.zip);
|
||||||
|
pushArray(this.templatedFiles, this.targets);
|
||||||
|
this.templatedFiles = uniq(this.templatedFiles);
|
||||||
|
return this.templatedFiles;
|
||||||
|
}
|
||||||
|
}, {
|
||||||
|
key: "getTags",
|
||||||
|
value: function getTags() {
|
||||||
|
var result = {
|
||||||
|
headers: [],
|
||||||
|
footers: []
|
||||||
|
};
|
||||||
|
for (var key in this.compiled) {
|
||||||
|
var contentType = this.filesContentTypes[key];
|
||||||
|
if (contentType === "application/vnd.openxmlformats-officedocument.wordprocessingml.document.main+xml") {
|
||||||
|
result.document = {
|
||||||
|
target: key,
|
||||||
|
tags: _getTags(this.compiled[key].postparsed)
|
||||||
|
};
|
||||||
|
}
|
||||||
|
if (contentType === "application/vnd.openxmlformats-officedocument.wordprocessingml.header+xml") {
|
||||||
|
result.headers.push({
|
||||||
|
target: key,
|
||||||
|
tags: _getTags(this.compiled[key].postparsed)
|
||||||
|
});
|
||||||
|
}
|
||||||
|
if (contentType === "application/vnd.openxmlformats-officedocument.wordprocessingml.footer+xml") {
|
||||||
|
result.footers.push({
|
||||||
|
target: key,
|
||||||
|
tags: _getTags(this.compiled[key].postparsed)
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return result;
|
||||||
|
}
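// Sketch of the value returned by getTags() (editor's note; the tag and file
// names are illustrative, not taken from a real document):
//   {
//     document: { target: "word/document.xml", tags: { first_name: {}, last_name: {} } },
//     headers: [{ target: "word/header1.xml", tags: { company: {} } }],
//     footers: [{ target: "word/footer1.xml", tags: { page_note: {} } }]
//   }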
|
||||||
|
|
||||||
|
/* Export functions, present since 3.62.0 */
|
||||||
|
}, {
|
||||||
|
key: "toBuffer",
|
||||||
|
value: function toBuffer(options) {
|
||||||
|
return this.zip.generate(_objectSpread(_objectSpread({
|
||||||
|
compression: "DEFLATE",
|
||||||
|
fileOrder: zipFileOrder
|
||||||
|
}, options), {}, {
|
||||||
|
type: "nodebuffer"
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
/* Export functions, present since 3.62.0 */
|
||||||
|
}, {
|
||||||
|
key: "toBlob",
|
||||||
|
value: function toBlob(options) {
|
||||||
|
return this.zip.generate(_objectSpread(_objectSpread({
|
||||||
|
compression: "DEFLATE",
|
||||||
|
fileOrder: zipFileOrder
|
||||||
|
}, options), {}, {
|
||||||
|
type: "blob"
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
/* Export functions, present since 3.62.0 */
|
||||||
|
}, {
|
||||||
|
key: "toBase64",
|
||||||
|
value: function toBase64(options) {
|
||||||
|
return this.zip.generate(_objectSpread(_objectSpread({
|
||||||
|
compression: "DEFLATE",
|
||||||
|
fileOrder: zipFileOrder
|
||||||
|
}, options), {}, {
|
||||||
|
type: "base64"
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
/* Export functions, present since 3.62.0 */
|
||||||
|
}, {
|
||||||
|
key: "toUint8Array",
|
||||||
|
value: function toUint8Array(options) {
|
||||||
|
return this.zip.generate(_objectSpread(_objectSpread({
|
||||||
|
compression: "DEFLATE",
|
||||||
|
fileOrder: zipFileOrder
|
||||||
|
}, options), {}, {
|
||||||
|
type: "uint8array"
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
/* Export functions, present since 3.62.0 */
|
||||||
|
}, {
|
||||||
|
key: "toArrayBuffer",
|
||||||
|
value: function toArrayBuffer(options) {
|
||||||
|
return this.zip.generate(_objectSpread(_objectSpread({
|
||||||
|
compression: "DEFLATE",
|
||||||
|
fileOrder: zipFileOrder
|
||||||
|
}, options), {}, {
|
||||||
|
type: "arraybuffer"
|
||||||
|
}));
|
||||||
|
}
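// Usage sketch (editor's note; assumes `doc` is a rendered Docxtemplater
// instance): all five helpers above delegate to zip.generate() with DEFLATE
// compression and the zipFileOrder ordering, differing only in the output type:
//   const buf = doc.toBuffer();      // Node.js Buffer
//   const blob = doc.toBlob();       // browser Blob
//   const b64 = doc.toBase64();      // base64 string
//   const u8 = doc.toUint8Array();   // Uint8Array
//   const ab = doc.toArrayBuffer();  // ArrayBuffer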
|
||||||
|
}]);
|
||||||
|
}();
|
||||||
|
Docxtemplater.DocUtils = DocUtils;
|
||||||
|
Docxtemplater.Errors = require("./errors.js");
|
||||||
|
Docxtemplater.XmlTemplater = require("./xml-templater.js");
|
||||||
|
Docxtemplater.FileTypeConfig = require("./file-type-config.js");
|
||||||
|
Docxtemplater.XmlMatcher = require("./xml-matcher.js");
|
||||||
|
module.exports = Docxtemplater;
|
||||||
|
module.exports["default"] = Docxtemplater;
426 tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/js/docxtemplater.test-d.ts (generated, vendored, new file)
@@ -0,0 +1,426 @@
import Docxtemplater, { DXT } from "./docxtemplater";
|
||||||
|
import InspectModule from "./inspect-module";
|
||||||
|
import expressionParser from "../expressions";
|
||||||
|
import ieExpressionParser from "../expressions-ie11";
|
||||||
|
import TxtTemplater from "./text";
|
||||||
|
const PizZip: any = require("pizzip");
|
||||||
|
import { expectType, expectError } from "tsd";
|
||||||
|
|
||||||
|
expressionParser.filters.map = function (input: any, key: any): any {
|
||||||
|
if (!input) {
|
||||||
|
return input;
|
||||||
|
}
|
||||||
|
|
||||||
|
if ("map" in input) {
|
||||||
|
return input.map(function (x: any) {
|
||||||
|
return x[key];
|
||||||
|
});
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
ieExpressionParser.filters.map = function (input: any, key: any): any {
|
||||||
|
if (!input) {
|
||||||
|
return input;
|
||||||
|
}
|
||||||
|
|
||||||
|
if ("map" in input) {
|
||||||
|
return input.map(function (x: any) {
|
||||||
|
return x[key];
|
||||||
|
});
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const tDoc = new TxtTemplater("Hello {#users}{name},{/users} how are you ?", {
|
||||||
|
parser: expressionParser,
|
||||||
|
});
|
||||||
|
tDoc.render({ users: [{ name: "John" }, { name: "Baz" }] });
|
||||||
|
|
||||||
|
const tDoc2 = new TxtTemplater("Hello {#users}{name},{/users} how are you ?", {
|
||||||
|
parser: expressionParser,
|
||||||
|
});
|
||||||
|
tDoc2
|
||||||
|
.renderAsync({ users: [{ name: "John" }, { name: "Baz" }] })
|
||||||
|
.then(function (result: any) {
|
||||||
|
console.log(result.toUpperCase());
|
||||||
|
});
|
||||||
|
|
||||||
|
const doc1 = new Docxtemplater(
|
||||||
|
{},
|
||||||
|
{
|
||||||
|
delimiters: { start: "[[", end: "]]" },
|
||||||
|
nullGetter: function (part) {
|
||||||
|
expectError(part.foobar);
|
||||||
|
if (part.module === "rawxml") {
|
||||||
|
return "";
|
||||||
|
}
|
||||||
|
if (part.type === "placeholder" && part.value === "foobar") {
|
||||||
|
return "{Foobar}";
|
||||||
|
}
|
||||||
|
return "Hello";
|
||||||
|
},
|
||||||
|
}
|
||||||
|
);
|
||||||
|
const iModule = new InspectModule();
|
||||||
|
doc1.setData({ foo: "bar" });
|
||||||
|
doc1.attachModule({
|
||||||
|
set: function () {},
|
||||||
|
parse: function (placeHolderContent) {
|
||||||
|
if (placeHolderContent.indexOf(":hello") === 0) {
|
||||||
|
return {
|
||||||
|
type: "placeholder",
|
||||||
|
module: "mycustomModule",
|
||||||
|
value: placeHolderContent.substr(7),
|
||||||
|
isEmpty: "foobar",
|
||||||
|
};
|
||||||
|
}
|
||||||
|
return null;
|
||||||
|
},
|
||||||
|
getFoobar: function () {},
|
||||||
|
});
|
||||||
|
doc1.attachModule(iModule);
|
||||||
|
const tags = iModule.getAllTags();
|
||||||
|
const tags2 = iModule.getAllStructuredTags();
|
||||||
|
const nullValues = iModule.fullInspected["word/document.xml"].nullValues;
|
||||||
|
const firstTag = nullValues.detail[0].part.value;
|
||||||
|
const scope = nullValues.detail[0].scopeManager.scopeList[0];
|
||||||
|
expectType<string>(firstTag);
|
||||||
|
doc1.render();
|
||||||
|
|
||||||
|
const buf: Buffer = doc1.toBuffer({
|
||||||
|
compression: "DEFLATE",
|
||||||
|
});
|
||||||
|
const blob: Blob = doc1.toBlob({
|
||||||
|
compression: "DEFLATE",
|
||||||
|
});
|
||||||
|
const str: string = doc1.toBase64({
|
||||||
|
compression: "DEFLATE",
|
||||||
|
});
|
||||||
|
const u8: Uint8Array = doc1.toUint8Array({
|
||||||
|
compression: "DEFLATE",
|
||||||
|
});
|
||||||
|
const ab: ArrayBuffer = doc1.toArrayBuffer({
|
||||||
|
compression: "DEFLATE",
|
||||||
|
});
|
||||||
|
|
||||||
|
new Docxtemplater(
|
||||||
|
{},
|
||||||
|
{
|
||||||
|
stripInvalidXMLChars: true,
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
new Docxtemplater(
|
||||||
|
{},
|
||||||
|
{
|
||||||
|
stripInvalidXMLChars: false,
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
new Docxtemplater(
|
||||||
|
{},
|
||||||
|
{
|
||||||
|
errorLogging: false,
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
new Docxtemplater(
|
||||||
|
{},
|
||||||
|
{
|
||||||
|
errorLogging: "jsonl",
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
new Docxtemplater(
|
||||||
|
{},
|
||||||
|
{
|
||||||
|
errorLogging: "json",
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
expectError(doc1.foobar());
|
||||||
|
expectError(new Docxtemplater(1, 2));
|
||||||
|
expectError(new Docxtemplater({}, { delimiters: { start: 1, end: "]]" } }));
|
||||||
|
expectError(new Docxtemplater({}, { delimiters: { start: "[[" } }));
|
||||||
|
expectError(new Docxtemplater({}, { stripInvalidXMLChars: "yo" }));
|
||||||
|
|
||||||
|
const doc2 = new Docxtemplater();
|
||||||
|
doc2.loadZip(new PizZip("hello"));
|
||||||
|
|
||||||
|
// Error because parser should return a {get: fn} object
|
||||||
|
expectError(
|
||||||
|
doc2.setOptions({
|
||||||
|
parser: function (tag) {
|
||||||
|
return 10;
|
||||||
|
},
|
||||||
|
})
|
||||||
|
);
|
||||||
|
|
||||||
|
doc2.setOptions({
|
||||||
|
parser: function (tag) {
|
||||||
|
expectType<string>(tag);
|
||||||
|
return {
|
||||||
|
get: function (scope, context) {
|
||||||
|
const first = context.scopeList[0];
|
||||||
|
expectType<DXT.integer>(context.num);
|
||||||
|
expectError(context.foobar);
|
||||||
|
if (context.meta.part.value === tag) {
|
||||||
|
return scope[context.meta.part.value];
|
||||||
|
}
|
||||||
|
expectError(context.meta.part.other);
|
||||||
|
return scope[tag];
|
||||||
|
},
|
||||||
|
};
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
const doc3 = new Docxtemplater();
|
||||||
|
doc3.loadZip(new PizZip("hello"));
|
||||||
|
doc3.compile();
|
||||||
|
doc3.resolveData({ a: "b" }).then(function () {
|
||||||
|
doc3.render();
|
||||||
|
});
|
||||||
|
doc3.replaceFirstSection = true;
|
||||||
|
doc3.replaceLastSection = true;
|
||||||
|
const doc4 = new Docxtemplater(new PizZip("hello"));
|
||||||
|
doc4.renderAsync({ a: "b" }).then(function () {
|
||||||
|
console.log("end");
|
||||||
|
});
|
||||||
|
const text = doc3.getFullText();
|
||||||
|
const text2 = doc3.getFullText("word/heading1.xml");
|
||||||
|
|
||||||
|
new Docxtemplater(new PizZip("hello"), { errorLogging: false });
|
||||||
|
|
||||||
|
// Error because getFullText requires a string parameter
|
||||||
|
expectError(doc3.getFullText(false));
|
||||||
|
expectError(doc3.getFullText(10));
|
||||||
|
|
||||||
|
const doc5 = new Docxtemplater(new PizZip("hello"), {
|
||||||
|
parser: expressionParser,
|
||||||
|
});
|
||||||
|
|
||||||
|
const doc6 = new Docxtemplater(new PizZip("hello"), {
|
||||||
|
parser: ieExpressionParser,
|
||||||
|
});
|
||||||
|
|
||||||
|
const doc7 = new Docxtemplater(new PizZip("hello"), {
|
||||||
|
parser: expressionParser.configure({
|
||||||
|
filters: {
|
||||||
|
foo: (a: any) => a,
|
||||||
|
bar: (a: any) => a,
|
||||||
|
},
|
||||||
|
csp: true,
|
||||||
|
cache: {},
|
||||||
|
literals: { true: true },
|
||||||
|
}),
|
||||||
|
});
|
||||||
|
|
||||||
|
const doc8 = new Docxtemplater(new PizZip("hello"), {
|
||||||
|
parser: ieExpressionParser.configure({
|
||||||
|
filters: {
|
||||||
|
foo: (a: any) => a,
|
||||||
|
bar: (a: any) => a,
|
||||||
|
},
|
||||||
|
csp: true,
|
||||||
|
cache: {},
|
||||||
|
literals: { true: true },
|
||||||
|
}),
|
||||||
|
});
|
||||||
|
|
||||||
|
const doc9 = new Docxtemplater(new PizZip("hello"), {
|
||||||
|
syntax: {
|
||||||
|
allowUnopenedTag: true,
|
||||||
|
allowUnclosedTag: true,
|
||||||
|
changeDelimiterPrefix: null,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
const doc10 = new Docxtemplater(new PizZip("hello"), {
|
||||||
|
syntax: {
|
||||||
|
allowUnopenedTag: true,
|
||||||
|
changeDelimiterPrefix: "",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
function validStartChars(ch: string): boolean {
|
||||||
|
return /[a-z]/.test(ch);
|
||||||
|
}
|
||||||
|
function validContinuationChars(ch: string): boolean {
|
||||||
|
return /[a-z]/.test(ch);
|
||||||
|
}
|
||||||
|
expressionParser.configure({
|
||||||
|
isIdentifierStart: validStartChars,
|
||||||
|
isIdentifierContinue: validContinuationChars,
|
||||||
|
});
|
||||||
|
ieExpressionParser.configure({
|
||||||
|
isIdentifierStart: validStartChars,
|
||||||
|
isIdentifierContinue: validContinuationChars,
|
||||||
|
});
|
||||||
|
|
||||||
|
expressionParser.configure({
|
||||||
|
evaluateIdentifier(
|
||||||
|
tag: string,
|
||||||
|
scope: any,
|
||||||
|
scopeList: any[],
|
||||||
|
context: any
|
||||||
|
) {
|
||||||
|
let res = context.num + context.num;
|
||||||
|
return res;
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
expressionParser.configure({
|
||||||
|
setIdentifier(
|
||||||
|
tag: string,
|
||||||
|
value: any,
|
||||||
|
scope: any,
|
||||||
|
scopeList: any[],
|
||||||
|
context: any
|
||||||
|
) {
|
||||||
|
scopeList[0][tag] = value;
|
||||||
|
return true;
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
expressionParser.configure({
|
||||||
|
postEvaluate(
|
||||||
|
result: any,
|
||||||
|
tag: string,
|
||||||
|
scope: any,
|
||||||
|
context: DXT.ParserContext
|
||||||
|
) {
|
||||||
|
return result;
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
ieExpressionParser.configure({
|
||||||
|
postEvaluate(
|
||||||
|
result: any,
|
||||||
|
tag: string,
|
||||||
|
scope: any,
|
||||||
|
context: DXT.ParserContext
|
||||||
|
) {
|
||||||
|
return result;
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
// Define the parameter type for getFileType
|
||||||
|
interface FileTypeParams {
|
||||||
|
doc: Docxtemplater;
|
||||||
|
}
|
||||||
|
|
||||||
|
const avoidRenderingCoreXMLModule = {
|
||||||
|
name: "avoidRenderingCoreXMLModule",
|
||||||
|
getFileType({ doc }: FileTypeParams): void {
|
||||||
|
doc.targets = doc.targets.filter(function (file: string) {
|
||||||
|
if (
|
||||||
|
file === "docProps/core.xml" ||
|
||||||
|
file === "docProps/app.xml" ||
|
||||||
|
file === "docProps/custom.xml"
|
||||||
|
) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
});
|
||||||
|
},
|
||||||
|
};
|
||||||
|
new Docxtemplater(new PizZip("hello"), {
|
||||||
|
modules: [
|
||||||
|
avoidRenderingCoreXMLModule,
|
||||||
|
{
|
||||||
|
optionsTransformer(options, doc) {
|
||||||
|
doc.modules.forEach(function (module) {
|
||||||
|
if (module.name === "LoopModule") {
|
||||||
|
module.prefix.start = "FOR ";
|
||||||
|
module.prefix.start = "ENDFOR ";
|
||||||
|
}
|
||||||
|
});
|
||||||
|
return options;
|
||||||
|
},
|
||||||
|
render(part, options) {
|
||||||
|
if (part.type === "placeholder") {
|
||||||
|
let value = options.scopeManager.getValue(part.value, {
|
||||||
|
part,
|
||||||
|
});
|
||||||
|
return value;
|
||||||
|
}
|
||||||
|
return null;
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
paragraphLoop: true,
|
||||||
|
linebreaks: true,
|
||||||
|
});
|
||||||
|
|
||||||
|
interface SetOptions {
|
||||||
|
Lexer: any;
|
||||||
|
zip: any;
|
||||||
|
}
|
||||||
|
const fixDocPrCorruptionModule: DXT.Module = {
|
||||||
|
set(options: SetOptions) {
|
||||||
|
if (options.Lexer) {
|
||||||
|
this.Lexer = options.Lexer;
|
||||||
|
}
|
||||||
|
if (options.zip) {
|
||||||
|
this.zip = options.zip;
|
||||||
|
}
|
||||||
|
},
|
||||||
|
on(event) {
|
||||||
|
if (event === "attached") {
|
||||||
|
this.attached = false;
|
||||||
|
}
|
||||||
|
if (event !== "syncing-zip") {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const zip = this.zip;
|
||||||
|
const Lexer = this.Lexer;
|
||||||
|
let prId = 1;
|
||||||
|
function setSingleAttribute(
|
||||||
|
partValue: string,
|
||||||
|
attr: string,
|
||||||
|
attrValue: string | number
|
||||||
|
) {
|
||||||
|
const regex = new RegExp(`(<.* ${attr}=")([^"]+)(".*)$`);
|
||||||
|
if (regex.test(partValue)) {
|
||||||
|
return partValue.replace(regex, `$1${attrValue}$3`);
|
||||||
|
}
|
||||||
|
let end = partValue.lastIndexOf("/>");
|
||||||
|
if (end === -1) {
|
||||||
|
end = partValue.lastIndexOf(">");
|
||||||
|
}
|
||||||
|
return (
|
||||||
|
partValue.substr(0, end) +
|
||||||
|
` ${attr}="${attrValue}"` +
|
||||||
|
partValue.substr(end)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
for (const f of zip.file(/\.xml$/)) {
|
||||||
|
let text = f.asText();
|
||||||
|
const xmllexed = Lexer.xmlparse(text, {
|
||||||
|
text: [],
|
||||||
|
other: ["wp:docPr"],
|
||||||
|
});
|
||||||
|
if (xmllexed.length > 1) {
|
||||||
|
text = xmllexed.reduce(function (
|
||||||
|
fullText: string,
|
||||||
|
part: DXT.Part
|
||||||
|
) {
|
||||||
|
if (
|
||||||
|
part.tag === "wp:docPr" &&
|
||||||
|
part.position &&
|
||||||
|
["start", "selfclosing"].indexOf(part.position) !== -1
|
||||||
|
) {
|
||||||
|
return (
|
||||||
|
fullText +
|
||||||
|
setSingleAttribute(part.value, "id", prId++)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
return fullText + part.value;
|
||||||
|
}, "");
|
||||||
|
}
|
||||||
|
zip.file(f.name, text);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
};
|
||||||
|
new Docxtemplater(new PizZip("hello"), {
|
||||||
|
modules: [fixDocPrCorruptionModule],
|
||||||
|
});
36 tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/js/error-logger.js (generated, vendored, new file)
@@ -0,0 +1,36 @@
"use strict";
|
||||||
|
|
||||||
|
var _require = require("./doc-utils.js"),
|
||||||
|
pushArray = _require.pushArray;
|
||||||
|
// The error thrown here contains additional information when logged with JSON.stringify (it contains a properties object containing all suberrors).
|
||||||
|
function replaceErrors(key, value) {
|
||||||
|
if (value instanceof Error) {
|
||||||
|
return pushArray(Object.getOwnPropertyNames(value), ["stack"]).reduce(function (error, key) {
|
||||||
|
error[key] = value[key];
|
||||||
|
if (key === "stack") {
|
||||||
|
// This is used because in Firefox, stack is not an own property
|
||||||
|
error[key] = value[key].toString();
|
||||||
|
}
|
||||||
|
return error;
|
||||||
|
}, {});
|
||||||
|
}
|
||||||
|
return value;
|
||||||
|
}
|
||||||
|
function logger(error, logging) {
|
||||||
|
// eslint-disable-next-line no-console
|
||||||
|
console.log(JSON.stringify({
|
||||||
|
error: error
|
||||||
|
}, replaceErrors, logging === "json" ? 2 : null));
|
||||||
|
if (error.properties && error.properties.errors instanceof Array) {
|
||||||
|
var errorMessages = error.properties.errors.map(function (error) {
|
||||||
|
return error.properties.explanation;
|
||||||
|
}).join("\n");
|
||||||
|
// eslint-disable-next-line no-console
|
||||||
|
console.log("errorMessages", errorMessages);
|
||||||
|
/*
|
||||||
|
* errorMessages is a humanly readable message looking like this :
|
||||||
|
* 'The tag beginning with "foobar" is unopened'
|
||||||
|
*/
|
||||||
|
}
|
||||||
|
}
|
||||||
|
module.exports = logger;
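// Usage sketch (editor's note, not part of this file): this appears to be the
// logger behind docxtemplater's `errorLogging` option; calling it directly
// looks roughly like the following (the `doc`/`data` names are placeholders):
//   const logger = require("./error-logger.js");
//   try {
//     doc.render(data);
//   } catch (e) {
//     logger(e, "json");   // pretty-printed JSON; "jsonl" gives one line
//   }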
409 tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/js/errors.js (generated, vendored, new file)
@@ -0,0 +1,409 @@
"use strict";
|
||||||
|
|
||||||
|
function _typeof(o) { "@babel/helpers - typeof"; return _typeof = "function" == typeof Symbol && "symbol" == typeof Symbol.iterator ? function (o) { return typeof o; } : function (o) { return o && "function" == typeof Symbol && o.constructor === Symbol && o !== Symbol.prototype ? "symbol" : typeof o; }, _typeof(o); }
|
||||||
|
function ownKeys(e, r) { var t = Object.keys(e); if (Object.getOwnPropertySymbols) { var o = Object.getOwnPropertySymbols(e); r && (o = o.filter(function (r) { return Object.getOwnPropertyDescriptor(e, r).enumerable; })), t.push.apply(t, o); } return t; }
|
||||||
|
function _objectSpread(e) { for (var r = 1; r < arguments.length; r++) { var t = null != arguments[r] ? arguments[r] : {}; r % 2 ? ownKeys(Object(t), !0).forEach(function (r) { _defineProperty(e, r, t[r]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(e, Object.getOwnPropertyDescriptors(t)) : ownKeys(Object(t)).forEach(function (r) { Object.defineProperty(e, r, Object.getOwnPropertyDescriptor(t, r)); }); } return e; }
|
||||||
|
function _defineProperty(e, r, t) { return (r = _toPropertyKey(r)) in e ? Object.defineProperty(e, r, { value: t, enumerable: !0, configurable: !0, writable: !0 }) : e[r] = t, e; }
|
||||||
|
function _toPropertyKey(t) { var i = _toPrimitive(t, "string"); return "symbol" == _typeof(i) ? i : i + ""; }
|
||||||
|
function _toPrimitive(t, r) { if ("object" != _typeof(t) || !t) return t; var e = t[Symbol.toPrimitive]; if (void 0 !== e) { var i = e.call(t, r || "default"); if ("object" != _typeof(i)) return i; throw new TypeError("@@toPrimitive must return a primitive value."); } return ("string" === r ? String : Number)(t); }
|
||||||
|
var _require = require("./utils.js"),
|
||||||
|
last = _require.last,
|
||||||
|
first = _require.first;
|
||||||
|
function XTError(message) {
|
||||||
|
this.name = "GenericError";
|
||||||
|
this.message = message;
|
||||||
|
this.stack = new Error(message).stack;
|
||||||
|
}
|
||||||
|
XTError.prototype = Error.prototype;
|
||||||
|
function XTTemplateError(message) {
|
||||||
|
this.name = "TemplateError";
|
||||||
|
this.message = message;
|
||||||
|
this.stack = new Error(message).stack;
|
||||||
|
}
|
||||||
|
XTTemplateError.prototype = new XTError();
|
||||||
|
function XTRenderingError(message) {
|
||||||
|
this.name = "RenderingError";
|
||||||
|
this.message = message;
|
||||||
|
this.stack = new Error(message).stack;
|
||||||
|
}
|
||||||
|
XTRenderingError.prototype = new XTError();
|
||||||
|
function XTScopeParserError(message) {
|
||||||
|
this.name = "ScopeParserError";
|
||||||
|
this.message = message;
|
||||||
|
this.stack = new Error(message).stack;
|
||||||
|
}
|
||||||
|
XTScopeParserError.prototype = new XTError();
|
||||||
|
function XTInternalError(message) {
|
||||||
|
this.name = "InternalError";
|
||||||
|
this.properties = {
|
||||||
|
explanation: "InternalError"
|
||||||
|
};
|
||||||
|
this.message = message;
|
||||||
|
this.stack = new Error(message).stack;
|
||||||
|
}
|
||||||
|
XTInternalError.prototype = new XTError();
|
||||||
|
function XTAPIVersionError(message) {
|
||||||
|
this.name = "APIVersionError";
|
||||||
|
this.properties = {
|
||||||
|
explanation: "APIVersionError"
|
||||||
|
};
|
||||||
|
this.message = message;
|
||||||
|
this.stack = new Error(message).stack;
|
||||||
|
}
|
||||||
|
XTAPIVersionError.prototype = new XTError();
|
||||||
|
function throwApiVersionError(msg, properties) {
|
||||||
|
var err = new XTAPIVersionError(msg);
|
||||||
|
err.properties = _objectSpread({
|
||||||
|
id: "api_version_error"
|
||||||
|
}, properties);
|
||||||
|
throw err;
|
||||||
|
}
|
||||||
|
function throwMultiError(errors) {
|
||||||
|
var err = new XTTemplateError("Multi error");
|
||||||
|
err.properties = {
|
||||||
|
errors: errors,
|
||||||
|
id: "multi_error",
|
||||||
|
explanation: "The template has multiple errors"
|
||||||
|
};
|
||||||
|
throw err;
|
||||||
|
}
|
||||||
|
function getUnopenedTagException(options) {
|
||||||
|
var err = new XTTemplateError("Unopened tag");
|
||||||
|
err.properties = {
|
||||||
|
xtag: last(options.xtag.split(" ")),
|
||||||
|
id: "unopened_tag",
|
||||||
|
context: options.xtag,
|
||||||
|
offset: options.offset,
|
||||||
|
lIndex: options.lIndex,
|
||||||
|
explanation: "The tag beginning with \"".concat(options.xtag.substr(0, 10), "\" is unopened")
|
||||||
|
};
|
||||||
|
return err;
|
||||||
|
}
|
||||||
|
function getDuplicateOpenTagException(options) {
|
||||||
|
var err = new XTTemplateError("Duplicate open tag, expected one open tag");
|
||||||
|
err.properties = {
|
||||||
|
xtag: first(options.xtag.split(" ")),
|
||||||
|
id: "duplicate_open_tag",
|
||||||
|
context: options.xtag,
|
||||||
|
offset: options.offset,
|
||||||
|
lIndex: options.lIndex,
|
||||||
|
explanation: "The tag beginning with \"".concat(options.xtag.substr(0, 10), "\" has duplicate open tags")
|
||||||
|
};
|
||||||
|
return err;
|
||||||
|
}
|
||||||
|
function getDuplicateCloseTagException(options) {
|
||||||
|
var err = new XTTemplateError("Duplicate close tag, expected one close tag");
|
||||||
|
err.properties = {
|
||||||
|
xtag: first(options.xtag.split(" ")),
|
||||||
|
id: "duplicate_close_tag",
|
||||||
|
context: options.xtag,
|
||||||
|
offset: options.offset,
|
||||||
|
lIndex: options.lIndex,
|
||||||
|
explanation: "The tag ending with \"".concat(options.xtag.substr(0, 10), "\" has duplicate close tags")
|
||||||
|
};
|
||||||
|
return err;
|
||||||
|
}
|
||||||
|
function getUnclosedTagException(options) {
|
||||||
|
var err = new XTTemplateError("Unclosed tag");
|
||||||
|
err.properties = {
|
||||||
|
xtag: first(options.xtag.split(" ")).substr(1),
|
||||||
|
id: "unclosed_tag",
|
||||||
|
context: options.xtag,
|
||||||
|
offset: options.offset,
|
||||||
|
lIndex: options.lIndex,
|
||||||
|
explanation: "The tag beginning with \"".concat(options.xtag.substr(0, 10), "\" is unclosed")
|
||||||
|
};
|
||||||
|
return err;
|
||||||
|
}
|
||||||
|
function throwXmlTagNotFound(options) {
|
||||||
|
var err = new XTTemplateError("No tag \"".concat(options.element, "\" was found at the ").concat(options.position));
|
||||||
|
var part = options.parsed[options.index];
|
||||||
|
err.properties = {
|
||||||
|
id: "no_xml_tag_found_at_".concat(options.position),
|
||||||
|
explanation: "No tag \"".concat(options.element, "\" was found at the ").concat(options.position),
|
||||||
|
offset: part.offset,
|
||||||
|
part: part,
|
||||||
|
parsed: options.parsed,
|
||||||
|
index: options.index,
|
||||||
|
element: options.element
|
||||||
|
};
|
||||||
|
throw err;
|
||||||
|
}
|
||||||
|
function getCorruptCharactersException(_ref) {
|
||||||
|
var tag = _ref.tag,
|
||||||
|
value = _ref.value,
|
||||||
|
offset = _ref.offset;
|
||||||
|
var err = new XTRenderingError("There are some XML corrupt characters");
|
||||||
|
err.properties = {
|
||||||
|
id: "invalid_xml_characters",
|
||||||
|
xtag: tag,
|
||||||
|
value: value,
|
||||||
|
offset: offset,
|
||||||
|
explanation: "There are some corrupt characters for the field ".concat(tag)
|
||||||
|
};
|
||||||
|
return err;
|
||||||
|
}
|
||||||
|
function getInvalidRawXMLValueException(_ref2) {
|
||||||
|
var tag = _ref2.tag,
|
||||||
|
value = _ref2.value,
|
||||||
|
offset = _ref2.offset;
|
||||||
|
var err = new XTRenderingError("Non string values are not allowed for rawXML tags");
|
||||||
|
err.properties = {
|
||||||
|
id: "invalid_raw_xml_value",
|
||||||
|
xtag: tag,
|
||||||
|
value: value,
|
||||||
|
offset: offset,
|
||||||
|
explanation: "The value of the raw tag : '".concat(tag, "' is not a string")
|
||||||
|
};
|
||||||
|
return err;
|
||||||
|
}
|
||||||
|
function throwExpandNotFound(options) {
|
||||||
|
var _options$part = options.part,
|
||||||
|
value = _options$part.value,
|
||||||
|
offset = _options$part.offset,
|
||||||
|
_options$id = options.id,
|
||||||
|
id = _options$id === void 0 ? "raw_tag_outerxml_invalid" : _options$id,
|
||||||
|
_options$message = options.message,
|
||||||
|
message = _options$message === void 0 ? "Raw tag not in paragraph" : _options$message;
|
||||||
|
var part = options.part;
|
||||||
|
var _options$explanation = options.explanation,
|
||||||
|
explanation = _options$explanation === void 0 ? "The tag \"".concat(value, "\" is not inside a paragraph") : _options$explanation;
|
||||||
|
if (typeof explanation === "function") {
|
||||||
|
explanation = explanation(part);
|
||||||
|
}
|
||||||
|
var err = new XTTemplateError(message);
|
||||||
|
err.properties = {
|
||||||
|
id: id,
|
||||||
|
explanation: explanation,
|
||||||
|
rootError: options.rootError,
|
||||||
|
xtag: value,
|
||||||
|
offset: offset,
|
||||||
|
postparsed: options.postparsed,
|
||||||
|
expandTo: options.expandTo,
|
||||||
|
index: options.index
|
||||||
|
};
|
||||||
|
throw err;
|
||||||
|
}
|
||||||
|
function throwRawTagShouldBeOnlyTextInParagraph(options) {
|
||||||
|
var err = new XTTemplateError("Raw tag should be the only text in paragraph");
|
||||||
|
var tag = options.part.value;
|
||||||
|
err.properties = {
|
||||||
|
id: "raw_xml_tag_should_be_only_text_in_paragraph",
|
||||||
|
explanation: "The raw tag \"".concat(tag, "\" should be the only text in this paragraph. This means that this tag should not be surrounded by any text or spaces."),
|
||||||
|
xtag: tag,
|
||||||
|
offset: options.part.offset,
|
||||||
|
paragraphParts: options.paragraphParts
|
||||||
|
};
|
||||||
|
throw err;
|
||||||
|
}
|
||||||
|
function getUnmatchedLoopException(part) {
|
||||||
|
var location = part.location,
|
||||||
|
offset = part.offset,
|
||||||
|
square = part.square;
|
||||||
|
var t = location === "start" ? "unclosed" : "unopened";
|
||||||
|
var T = location === "start" ? "Unclosed" : "Unopened";
|
||||||
|
var err = new XTTemplateError("".concat(T, " loop"));
|
||||||
|
var tag = part.value;
|
||||||
|
err.properties = {
|
||||||
|
id: "".concat(t, "_loop"),
|
||||||
|
explanation: "The loop with tag \"".concat(tag, "\" is ").concat(t),
|
||||||
|
xtag: tag,
|
||||||
|
offset: offset
|
||||||
|
};
|
||||||
|
if (square) {
|
||||||
|
err.properties.square = square;
|
||||||
|
}
|
||||||
|
return err;
|
||||||
|
}
|
||||||
|
function getUnbalancedLoopException(pair, lastPair) {
|
||||||
|
var err = new XTTemplateError("Unbalanced loop tag");
|
||||||
|
var lastL = lastPair[0].part.value;
|
||||||
|
var lastR = lastPair[1].part.value;
|
||||||
|
var l = pair[0].part.value;
|
||||||
|
var r = pair[1].part.value;
|
||||||
|
err.properties = {
|
||||||
|
id: "unbalanced_loop_tags",
|
||||||
|
explanation: "Unbalanced loop tags {#".concat(lastL, "}{/").concat(lastR, "}{#").concat(l, "}{/").concat(r, "}"),
|
||||||
|
offset: [lastPair[0].part.offset, pair[1].part.offset],
|
||||||
|
lastPair: {
|
||||||
|
left: lastPair[0].part.value,
|
||||||
|
right: lastPair[1].part.value
|
||||||
|
},
|
||||||
|
pair: {
|
||||||
|
left: pair[0].part.value,
|
||||||
|
right: pair[1].part.value
|
||||||
|
}
|
||||||
|
};
|
||||||
|
return err;
|
||||||
|
}
|
||||||
|
function getClosingTagNotMatchOpeningTag(_ref3) {
|
||||||
|
var tags = _ref3.tags;
|
||||||
|
var err = new XTTemplateError("Closing tag does not match opening tag");
|
||||||
|
err.properties = {
|
||||||
|
id: "closing_tag_does_not_match_opening_tag",
|
||||||
|
explanation: "The tag \"".concat(tags[0].value, "\" is closed by the tag \"").concat(tags[1].value, "\""),
|
||||||
|
openingtag: first(tags).value,
|
||||||
|
offset: [first(tags).offset, last(tags).offset],
|
||||||
|
closingtag: last(tags).value
|
||||||
|
};
|
||||||
|
if (first(tags).square) {
|
||||||
|
err.properties.square = [first(tags).square, last(tags).square];
|
||||||
|
}
|
||||||
|
return err;
|
}
function getScopeCompilationError(_ref4) {
  var tag = _ref4.tag,
    rootError = _ref4.rootError,
    offset = _ref4.offset;
  var err = new XTScopeParserError("Scope parser compilation failed");
  err.properties = {
    id: "scopeparser_compilation_failed",
    offset: offset,
    xtag: tag,
    explanation: "The scope parser for the tag \"".concat(tag, "\" failed to compile"),
    rootError: rootError
  };
  return err;
}
function getScopeParserExecutionError(_ref5) {
  var tag = _ref5.tag,
    scope = _ref5.scope,
    error = _ref5.error,
    offset = _ref5.offset;
  var err = new XTScopeParserError("Scope parser execution failed");
  err.properties = {
    id: "scopeparser_execution_failed",
    explanation: "The scope parser for the tag ".concat(tag, " failed to execute"),
    scope: scope,
    offset: offset,
    xtag: tag,
    rootError: error
  };
  return err;
}
function getLoopPositionProducesInvalidXMLError(_ref6) {
  var tag = _ref6.tag,
    offset = _ref6.offset;
  var err = new XTTemplateError("The position of the loop tags \"".concat(tag, "\" would produce invalid XML"));
  err.properties = {
    xtag: tag,
    id: "loop_position_invalid",
    explanation: "The tags \"".concat(tag, "\" are misplaced in the document, for example one of them is in a table and the other one outside the table"),
    offset: offset
  };
  return err;
}
function throwUnimplementedTagType(part, index) {
  var errorMsg = "Unimplemented tag type \"".concat(part.type, "\"");
  if (part.module) {
    errorMsg += " \"".concat(part.module, "\"");
  }
  var err = new XTTemplateError(errorMsg);
  err.properties = {
    part: part,
    index: index,
    id: "unimplemented_tag_type"
  };
  throw err;
}
function throwMalformedXml() {
  var err = new XTInternalError("Malformed xml");
  err.properties = {
    explanation: "The template contains malformed xml",
    id: "malformed_xml"
  };
  throw err;
}
function throwResolveBeforeCompile() {
  var err = new XTInternalError("You must run `.compile()` before running `.resolveData()`");
  err.properties = {
    id: "resolve_before_compile",
    explanation: "You must run `.compile()` before running `.resolveData()`"
  };
  throw err;
}
function throwRenderInvalidTemplate() {
  var err = new XTInternalError("You should not call .render on a document that had compilation errors");
  err.properties = {
    id: "render_on_invalid_template",
    explanation: "You should not call .render on a document that had compilation errors"
  };
  throw err;
}
function throwRenderTwice() {
  var err = new XTInternalError("You should not call .render twice on the same docxtemplater instance");
  err.properties = {
    id: "render_twice",
    explanation: "You should not call .render twice on the same docxtemplater instance"
  };
  throw err;
}
function throwFileTypeNotIdentified(zip) {
  var files = Object.keys(zip.files).slice(0, 10);
  var msg = "";
  if (files.length === 0) {
    msg = "Empty zip file";
  } else {
    msg = "Zip file contains : ".concat(files.join(","));
  }
  var err = new XTInternalError("The filetype for this file could not be identified, is this file corrupted ? ".concat(msg));
  err.properties = {
    id: "filetype_not_identified",
    explanation: "The filetype for this file could not be identified, is this file corrupted ? ".concat(msg)
  };
  throw err;
}
function throwXmlInvalid(content, offset) {
  var err = new XTTemplateError("An XML file has invalid xml");
  err.properties = {
    id: "file_has_invalid_xml",
    content: content,
    offset: offset,
    explanation: "The docx contains invalid XML, it is most likely corrupt"
  };
  throw err;
}
function throwFileTypeNotHandled(fileType) {
  var err = new XTInternalError("The filetype \"".concat(fileType, "\" is not handled by docxtemplater"));
  err.properties = {
    id: "filetype_not_handled",
    explanation: "The file you are trying to generate is of type \"".concat(fileType, "\", but only docx and pptx formats are handled"),
    fileType: fileType
  };
  throw err;
}
module.exports = {
  XTError: XTError,
  XTTemplateError: XTTemplateError,
  XTInternalError: XTInternalError,
  XTScopeParserError: XTScopeParserError,
  XTAPIVersionError: XTAPIVersionError,
  // Remove this alias in v4
  RenderingError: XTRenderingError,
  XTRenderingError: XTRenderingError,
  getClosingTagNotMatchOpeningTag: getClosingTagNotMatchOpeningTag,
  getLoopPositionProducesInvalidXMLError: getLoopPositionProducesInvalidXMLError,
  getScopeCompilationError: getScopeCompilationError,
  getScopeParserExecutionError: getScopeParserExecutionError,
  getUnclosedTagException: getUnclosedTagException,
  getUnopenedTagException: getUnopenedTagException,
  getUnmatchedLoopException: getUnmatchedLoopException,
  getDuplicateCloseTagException: getDuplicateCloseTagException,
  getDuplicateOpenTagException: getDuplicateOpenTagException,
  getCorruptCharactersException: getCorruptCharactersException,
  getInvalidRawXMLValueException: getInvalidRawXMLValueException,
  getUnbalancedLoopException: getUnbalancedLoopException,
  throwApiVersionError: throwApiVersionError,
  throwFileTypeNotHandled: throwFileTypeNotHandled,
  throwFileTypeNotIdentified: throwFileTypeNotIdentified,
  throwMalformedXml: throwMalformedXml,
  throwMultiError: throwMultiError,
  throwExpandNotFound: throwExpandNotFound,
  throwRawTagShouldBeOnlyTextInParagraph: throwRawTagShouldBeOnlyTextInParagraph,
  throwUnimplementedTagType: throwUnimplementedTagType,
  throwXmlTagNotFound: throwXmlTagNotFound,
  throwXmlInvalid: throwXmlInvalid,
  throwResolveBeforeCompile: throwResolveBeforeCompile,
  throwRenderInvalidTemplate: throwRenderInvalidTemplate,
  throwRenderTwice: throwRenderTwice
};
32 tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/js/expressions-ie11.d.ts generated vendored Normal file
@@ -0,0 +1,32 @@
import { DXT } from "./js/docxtemplater";

interface ParserOptions {
  filters?: { [x: string]: (input: any, ...filters: any[]) => any };
  csp?: boolean;
  cache?: any;
  literals?: { [x: string]: any };
  isIdentifierStart?: (char: string) => boolean;
  isIdentifierContinue?: (char: string) => boolean;
  handleDotThis?: boolean;
  postEvaluate?: (
    result: any,
    tag: string,
    scope: any,
    context: DXT.ParserContext
  ) => any;
}

interface ExpressionParser extends DXT.Parser {
  compiled: any;
  getIdentifiers(): string[];
  getObjectIdentifiers(): any;
}

type Parser = {
  (tag: string): ExpressionParser;
  filters: { [x: string]: (input: any, ...filters: any[]) => any };
  configure: (options: ParserOptions) => (tag: string) => DXT.Parser;
};

declare var expressionParser: Parser;
export default expressionParser;
186 tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/js/expressions-ie11.js generated vendored Normal file
File diff suppressed because one or more lines are too long
45 tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/js/expressions.d.ts generated vendored Normal file
@@ -0,0 +1,45 @@
import { DXT } from "./js/docxtemplater";

interface ParserOptions {
  filters?: { [x: string]: (input: any, ...filters: any[]) => any };
  csp?: boolean;
  cache?: any;
  literals?: { [x: string]: any };
  isIdentifierStart?: (char: string) => boolean;
  isIdentifierContinue?: (char: string) => boolean;
  handleDotThis?: boolean;
  postEvaluate?: (
    result: any,
    tag: string,
    scope: any,
    context: DXT.ParserContext
  ) => any;
  evaluateIdentifier?: (
    tag: string,
    scope: any,
    scopeList: any[],
    context: DXT.ParserContext
  ) => any;
  setIdentifier?: (
    tag: string,
    value: any,
    scope: any,
    scopeList: any[],
    context: DXT.ParserContext
  ) => any;
}

interface ExpressionParser extends DXT.Parser {
  compiled: any;
  getIdentifiers(): string[];
  getObjectIdentifiers(): any;
}

type Parser = {
  (tag: string): ExpressionParser;
  filters: { [x: string]: (input: any, ...filters: any[]) => any };
  configure: (options: ParserOptions) => (tag: string) => ExpressionParser;
};

declare var expressionParser: Parser;
export default expressionParser;
328 tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/js/expressions.js generated vendored Normal file
File diff suppressed because one or more lines are too long
89 tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/js/file-type-config.js generated vendored Normal file
@@ -0,0 +1,89 @@
|
|||||||
|
"use strict";
|
||||||
|
|
||||||
|
var loopModule = require("./modules/loop.js");
|
||||||
|
var spacePreserveModule = require("./modules/space-preserve.js");
|
||||||
|
var rawXmlModule = require("./modules/rawxml.js");
|
||||||
|
var expandPairTrait = require("./modules/expand-pair-trait.js");
|
||||||
|
var render = require("./modules/render.js");
|
||||||
|
function DocXFileTypeConfig() {
|
||||||
|
return {
|
||||||
|
getTemplatedFiles: function getTemplatedFiles() {
|
||||||
|
return [];
|
||||||
|
},
|
||||||
|
textPath: function textPath(doc) {
|
||||||
|
return doc.textTarget;
|
||||||
|
},
|
||||||
|
tagsXmlTextArray: ["Company", "HyperlinkBase", "Manager", "cp:category", "cp:keywords", "dc:creator", "dc:description", "dc:subject", "dc:title", "cp:contentStatus", "w:t", "a:t", "m:t", "vt:lpstr", "vt:lpwstr"],
|
||||||
|
tagsXmlLexedArray: ["w:proofState", "w:tc", "w:tr", "w:tbl", "w:ftr", "w:hdr", "w:body", "w:document", "w:p", "w:r", "w:br", "w:rPr", "w:pPr", "w:spacing", "w:sdtContent", "w:sdt", "w:drawing", "w:sectPr", "w:type", "w:headerReference", "w:footerReference", "w:bookmarkStart", "w:bookmarkEnd", "w:commentRangeStart", "w:commentRangeEnd", "w:commentReference"],
|
||||||
|
droppedTagsInsidePlaceholder: ["w:p", "w:br", "w:bookmarkStart", "w:bookmarkEnd"],
|
||||||
|
expandTags: [{
|
||||||
|
contains: "w:tc",
|
||||||
|
expand: "w:tr"
|
||||||
|
}],
|
||||||
|
onParagraphLoop: [{
|
||||||
|
contains: "w:p",
|
||||||
|
expand: "w:p",
|
||||||
|
onlyTextInTag: true
|
||||||
|
}],
|
||||||
|
tagRawXml: "w:p",
|
||||||
|
baseModules: [loopModule, spacePreserveModule, expandPairTrait, rawXmlModule, render],
|
||||||
|
tagShouldContain: [{
|
||||||
|
tag: "w:sdtContent",
|
||||||
|
shouldContain: ["w:p", "w:r", "w:commentRangeStart", "w:sdt"],
|
||||||
|
value: "<w:p></w:p>"
|
||||||
|
}, {
|
||||||
|
tag: "w:tc",
|
||||||
|
shouldContain: ["w:p"],
|
||||||
|
value: "<w:p></w:p>"
|
||||||
|
}, {
|
||||||
|
tag: "w:tr",
|
||||||
|
shouldContain: ["w:tc"],
|
||||||
|
drop: true
|
||||||
|
}, {
|
||||||
|
tag: "w:tbl",
|
||||||
|
shouldContain: ["w:tr"],
|
||||||
|
drop: true
|
||||||
|
}]
|
||||||
|
};
|
||||||
|
}
|
||||||
|
function PptXFileTypeConfig() {
|
||||||
|
return {
|
||||||
|
getTemplatedFiles: function getTemplatedFiles() {
|
||||||
|
return [];
|
||||||
|
},
|
||||||
|
textPath: function textPath(doc) {
|
||||||
|
return doc.textTarget;
|
||||||
|
},
|
||||||
|
tagsXmlTextArray: ["Company", "HyperlinkBase", "Manager", "cp:category", "cp:keywords", "dc:creator", "dc:description", "dc:subject", "dc:title", "a:t", "m:t", "vt:lpstr", "vt:lpwstr"],
|
||||||
|
tagsXmlLexedArray: ["p:sp", "a:tc", "a:tr", "a:tbl", "a:graphicData", "a:p", "a:r", "a:rPr", "p:txBody", "a:txBody", "a:off", "a:ext", "p:graphicFrame", "p:xfrm", "a16:rowId", "a:endParaRPr"],
|
||||||
|
droppedTagsInsidePlaceholder: ["a:p", "a:endParaRPr"],
|
||||||
|
expandTags: [{
|
||||||
|
contains: "a:tc",
|
||||||
|
expand: "a:tr"
|
||||||
|
}],
|
||||||
|
onParagraphLoop: [{
|
||||||
|
contains: "a:p",
|
||||||
|
expand: "a:p",
|
||||||
|
onlyTextInTag: true
|
||||||
|
}],
|
||||||
|
tagRawXml: "p:sp",
|
||||||
|
baseModules: [loopModule, expandPairTrait, rawXmlModule, render],
|
||||||
|
tagShouldContain: [{
|
||||||
|
tag: "a:tbl",
|
||||||
|
shouldContain: ["a:tr"],
|
||||||
|
dropParent: "p:graphicFrame"
|
||||||
|
}, {
|
||||||
|
tag: "p:txBody",
|
||||||
|
shouldContain: ["a:p"],
|
||||||
|
value: "<a:p></a:p>"
|
||||||
|
}, {
|
||||||
|
tag: "a:txBody",
|
||||||
|
shouldContain: ["a:p"],
|
||||||
|
value: "<a:p></a:p>"
|
||||||
|
}]
|
||||||
|
};
|
||||||
|
}
|
||||||
|
module.exports = {
|
||||||
|
docx: DocXFileTypeConfig,
|
||||||
|
pptx: PptXFileTypeConfig
|
||||||
|
};
|
||||||
29 tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/js/filetypes.js generated vendored Normal file
@@ -0,0 +1,29 @@
"use strict";

var docxContentType = "application/vnd.openxmlformats-officedocument.wordprocessingml.document.main+xml";
var docxmContentType = "application/vnd.ms-word.document.macroEnabled.main+xml";
var dotxContentType = "application/vnd.openxmlformats-officedocument.wordprocessingml.template.main+xml";
var dotmContentType = "application/vnd.ms-word.template.macroEnabledTemplate.main+xml";
var headerContentType = "application/vnd.openxmlformats-officedocument.wordprocessingml.header+xml";
var footnotesContentType = "application/vnd.openxmlformats-officedocument.wordprocessingml.footnotes+xml";
var commentsContentType = "application/vnd.openxmlformats-officedocument.wordprocessingml.comments+xml";
var footerContentType = "application/vnd.openxmlformats-officedocument.wordprocessingml.footer+xml";
var pptxContentType = "application/vnd.openxmlformats-officedocument.presentationml.slide+xml";
var pptxSlideMaster = "application/vnd.openxmlformats-officedocument.presentationml.slideMaster+xml";
var pptxSlideLayout = "application/vnd.openxmlformats-officedocument.presentationml.slideLayout+xml";
var pptxPresentationContentType = "application/vnd.openxmlformats-officedocument.presentationml.presentation.main+xml";
var xlsxContentType = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet.main+xml";
var xlsmContentType = "application/vnd.ms-excel.sheet.macroEnabled.main+xml";
var xlsxWorksheetContentType = "application/vnd.openxmlformats-officedocument.spreadsheetml.worksheet+xml";
/*
 * This is used for the main part of the document, ie usually that would be the
 * type of word/document.xml
 */
var main = [docxContentType, docxmContentType, dotxContentType, dotmContentType];
var filetypes = {
  main: main,
  docx: [headerContentType].concat(main, [footerContentType, footnotesContentType, commentsContentType]),
  pptx: [pptxContentType, pptxSlideMaster, pptxSlideLayout, pptxPresentationContentType],
  xlsx: [xlsxContentType, xlsmContentType, xlsxWorksheetContentType]
};
module.exports = filetypes;
45 tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/js/get-content-types.js generated vendored Normal file
@@ -0,0 +1,45 @@
"use strict";

var _require = require("./doc-utils.js"),
  str2xml = _require.str2xml;
var ctXML = "[Content_Types].xml";
function collectContentTypes(overrides, defaults, zip) {
  var partNames = {};
  for (var _i2 = 0; _i2 < overrides.length; _i2++) {
    var override = overrides[_i2];
    var contentType = override.getAttribute("ContentType");
    var partName = override.getAttribute("PartName").substr(1);
    partNames[partName] = contentType;
  }
  var _loop = function _loop() {
    var def = defaults[_i4];
    var contentType = def.getAttribute("ContentType");
    var extension = def.getAttribute("Extension");
    zip.file(/./).map(function (_ref) {
      var name = _ref.name;
      if (name.slice(name.length - extension.length) === extension && !partNames[name] && name !== ctXML) {
        partNames[name] = contentType;
      }
    });
  };
  for (var _i4 = 0; _i4 < defaults.length; _i4++) {
    _loop();
  }
  return partNames;
}
function getContentTypes(zip) {
  var contentTypes = zip.files[ctXML];
  var contentTypeXml = contentTypes ? str2xml(contentTypes.asText()) : null;
  var overrides = contentTypeXml ? contentTypeXml.getElementsByTagName("Override") : null;
  var defaults = contentTypeXml ? contentTypeXml.getElementsByTagName("Default") : null;
  return {
    overrides: overrides,
    defaults: defaults,
    contentTypes: contentTypes,
    contentTypeXml: contentTypeXml
  };
}
module.exports = {
  collectContentTypes: collectContentTypes,
  getContentTypes: getContentTypes
};
19 tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/js/get-relation-types.js generated vendored Normal file
@@ -0,0 +1,19 @@
"use strict";

var _require = require("./doc-utils.js"),
  str2xml = _require.str2xml;
var relsFile = "_rels/.rels";
function getRelsTypes(zip) {
  var rootRels = zip.files[relsFile];
  var rootRelsXml = rootRels ? str2xml(rootRels.asText()) : null;
  var rootRelationships = rootRelsXml ? rootRelsXml.getElementsByTagName("Relationship") : [];
  var relsTypes = {};
  for (var _i2 = 0; _i2 < rootRelationships.length; _i2++) {
    var relation = rootRelationships[_i2];
    relsTypes[relation.getAttribute("Target")] = relation.getAttribute("Type");
  }
  return relsTypes;
}
module.exports = {
  getRelsTypes: getRelsTypes
};
14 tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/js/get-resolved-id.js generated vendored Normal file
@@ -0,0 +1,14 @@
"use strict";

function getResolvedId(part, options) {
  if (part.lIndex == null) {
    return null;
  }
  var path = options.scopeManager.scopePathItem;
  if (part.parentPart) {
    path = path.slice(0, path.length - 1);
  }
  var res = options.filePath + "@" + part.lIndex.toString() + "-" + path.join("-");
  return res;
}
module.exports = getResolvedId;
100 tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/js/get-tags.js generated vendored Normal file
@@ -0,0 +1,100 @@
|
|||||||
|
"use strict";
|
||||||
|
|
||||||
|
function _toConsumableArray(r) { return _arrayWithoutHoles(r) || _iterableToArray(r) || _unsupportedIterableToArray(r) || _nonIterableSpread(); }
|
||||||
|
function _nonIterableSpread() { throw new TypeError("Invalid attempt to spread non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); }
|
||||||
|
function _unsupportedIterableToArray(r, a) { if (r) { if ("string" == typeof r) return _arrayLikeToArray(r, a); var t = {}.toString.call(r).slice(8, -1); return "Object" === t && r.constructor && (t = r.constructor.name), "Map" === t || "Set" === t ? Array.from(r) : "Arguments" === t || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(t) ? _arrayLikeToArray(r, a) : void 0; } }
|
||||||
|
function _iterableToArray(r) { if ("undefined" != typeof Symbol && null != r[Symbol.iterator] || null != r["@@iterator"]) return Array.from(r); }
|
||||||
|
function _arrayWithoutHoles(r) { if (Array.isArray(r)) return _arrayLikeToArray(r); }
|
||||||
|
function _arrayLikeToArray(r, a) { (null == a || a > r.length) && (a = r.length); for (var e = 0, n = Array(a); e < a; e++) n[e] = r[e]; return n; }
|
||||||
|
function isPlaceholder(part) {
|
||||||
|
return part.type === "placeholder";
|
||||||
|
}
|
||||||
|
|
||||||
|
/* eslint-disable-next-line complexity */
|
||||||
|
function getTags(postParsed) {
|
||||||
|
var tags = {};
|
||||||
|
var stack = [{
|
||||||
|
items: postParsed.filter(isPlaceholder),
|
||||||
|
parents: [],
|
||||||
|
path: []
|
||||||
|
}];
|
||||||
|
function processFiltered(part, current, filtered) {
|
||||||
|
if (filtered.length) {
|
||||||
|
stack.push({
|
||||||
|
items: filtered,
|
||||||
|
parents: [].concat(_toConsumableArray(current.parents), [part]),
|
||||||
|
path: part.dataBound !== false && !part.attrParsed && part.value && !part.attrParsed ? [].concat(_toConsumableArray(current.path), [part.value]) : _toConsumableArray(current.path)
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
function getLocalTags(tags, path) {
|
||||||
|
var sizeScope = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : path.length;
|
||||||
|
var localTags = tags;
|
||||||
|
for (var i = 0; i < sizeScope; i++) {
|
||||||
|
localTags = localTags[path[i]];
|
||||||
|
}
|
||||||
|
return localTags;
|
||||||
|
}
|
||||||
|
function getScopeSize(part, parents) {
|
||||||
|
var size = parents.length;
|
||||||
|
for (var _i2 = 0; _i2 < parents.length; _i2++) {
|
||||||
|
var parent = parents[_i2];
|
||||||
|
var lIndexLoop = typeof parent.lIndex === "number" ? parent.lIndex : parseInt(parent.lIndex.split("-")[0], 10);
|
||||||
|
if (lIndexLoop > part.lIndex) {
|
||||||
|
size--;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return size;
|
||||||
|
}
|
||||||
|
while (stack.length > 0) {
|
||||||
|
var current = stack.pop();
|
||||||
|
var localTags = getLocalTags(tags, current.path);
|
||||||
|
for (var _i4 = 0, _current$items2 = current.items; _i4 < _current$items2.length; _i4++) {
|
||||||
|
var _localTags4, _part$value2;
|
||||||
|
var part = _current$items2[_i4];
|
||||||
|
if (part.attrParsed) {
|
||||||
|
for (var key in part.attrParsed) {
|
||||||
|
processFiltered(part, current, part.attrParsed[key].filter(isPlaceholder));
|
||||||
|
}
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
if (part.subparsed) {
|
||||||
|
if (part.dataBound !== false) {
|
||||||
|
var _localTags, _part$value;
|
||||||
|
(_localTags = localTags)[_part$value = part.value] || (_localTags[_part$value] = {});
|
||||||
|
}
|
||||||
|
processFiltered(part, current, part.subparsed.filter(isPlaceholder));
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
if (part.cellParsed) {
|
||||||
|
for (var _i6 = 0, _part$cellPostParsed2 = part.cellPostParsed; _i6 < _part$cellPostParsed2.length; _i6++) {
|
||||||
|
var cp = _part$cellPostParsed2[_i6];
|
||||||
|
if (cp.type === "placeholder") {
|
||||||
|
if (cp.module === "pro-xml-templating/xls-module-loop") {
|
||||||
|
continue;
|
||||||
|
} else if (cp.subparsed) {
|
||||||
|
var _localTags2, _cp$value;
|
||||||
|
(_localTags2 = localTags)[_cp$value = cp.value] || (_localTags2[_cp$value] = {});
|
||||||
|
processFiltered(cp, current, cp.subparsed.filter(isPlaceholder));
|
||||||
|
} else {
|
||||||
|
var _localTags3, _cp$value2;
|
||||||
|
var sizeScope = getScopeSize(part, current.parents);
|
||||||
|
localTags = getLocalTags(tags, current.path, sizeScope);
|
||||||
|
(_localTags3 = localTags)[_cp$value2 = cp.value] || (_localTags3[_cp$value2] = {});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
if (part.dataBound === false) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
(_localTags4 = localTags)[_part$value2 = part.value] || (_localTags4[_part$value2] = {});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return tags;
|
||||||
|
}
|
||||||
|
module.exports = {
|
||||||
|
getTags: getTags,
|
||||||
|
isPlaceholder: isPlaceholder
|
||||||
|
};
|
||||||
29 tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/js/inspect-module.d.ts generated vendored Normal file
@@ -0,0 +1,29 @@
// Type definitions for Docxtemplater 3
// Project: https://github.com/open-xml-templating/docxtemplater/
// Definitions by: edi9999 <https://github.com/edi9999>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
// TypeScript Version: 3.9

import { DXT } from "./docxtemplater";
export default class InspectModule implements DXT.Module {
  constructor();
  getAllTags(): Record<string, unknown>;
  getTags(file?: string): Record<string, unknown>;
  fullInspected: Record<
    string,
    {
      nullValues: {
        detail: {
          part: DXT.Part;
          scopeManager: DXT.ScopeManager;
        }[];
        summary: string[][];
      };
    }
  >;

  getStructuredTags(file?: string): DXT.Part[];
  getAllStructuredTags(): DXT.Part[];
  getFileType(): string;
  getTemplatedFiles(): string[];
}
190 tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/js/inspect-module.js generated vendored Normal file
@@ -0,0 +1,190 @@
|
|||||||
|
"use strict";
|
||||||
|
|
||||||
|
function _typeof(o) { "@babel/helpers - typeof"; return _typeof = "function" == typeof Symbol && "symbol" == typeof Symbol.iterator ? function (o) { return typeof o; } : function (o) { return o && "function" == typeof Symbol && o.constructor === Symbol && o !== Symbol.prototype ? "symbol" : typeof o; }, _typeof(o); }
|
||||||
|
function ownKeys(e, r) { var t = Object.keys(e); if (Object.getOwnPropertySymbols) { var o = Object.getOwnPropertySymbols(e); r && (o = o.filter(function (r) { return Object.getOwnPropertyDescriptor(e, r).enumerable; })), t.push.apply(t, o); } return t; }
|
||||||
|
function _objectSpread(e) { for (var r = 1; r < arguments.length; r++) { var t = null != arguments[r] ? arguments[r] : {}; r % 2 ? ownKeys(Object(t), !0).forEach(function (r) { _defineProperty(e, r, t[r]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(e, Object.getOwnPropertyDescriptors(t)) : ownKeys(Object(t)).forEach(function (r) { Object.defineProperty(e, r, Object.getOwnPropertyDescriptor(t, r)); }); } return e; }
|
||||||
|
function _defineProperty(e, r, t) { return (r = _toPropertyKey(r)) in e ? Object.defineProperty(e, r, { value: t, enumerable: !0, configurable: !0, writable: !0 }) : e[r] = t, e; }
|
||||||
|
function _classCallCheck(a, n) { if (!(a instanceof n)) throw new TypeError("Cannot call a class as a function"); }
|
||||||
|
function _defineProperties(e, r) { for (var t = 0; t < r.length; t++) { var o = r[t]; o.enumerable = o.enumerable || !1, o.configurable = !0, "value" in o && (o.writable = !0), Object.defineProperty(e, _toPropertyKey(o.key), o); } }
|
||||||
|
function _createClass(e, r, t) { return r && _defineProperties(e.prototype, r), t && _defineProperties(e, t), Object.defineProperty(e, "prototype", { writable: !1 }), e; }
|
||||||
|
function _toPropertyKey(t) { var i = _toPrimitive(t, "string"); return "symbol" == _typeof(i) ? i : i + ""; }
|
||||||
|
function _toPrimitive(t, r) { if ("object" != _typeof(t) || !t) return t; var e = t[Symbol.toPrimitive]; if (void 0 !== e) { var i = e.call(t, r || "default"); if ("object" != _typeof(i)) return i; throw new TypeError("@@toPrimitive must return a primitive value."); } return ("string" === r ? String : Number)(t); }
|
||||||
|
var _require = require("lodash"),
|
||||||
|
merge = _require.merge,
|
||||||
|
cloneDeep = _require.cloneDeep;
|
||||||
|
var _require2 = require("./doc-utils.js"),
|
||||||
|
pushArray = _require2.pushArray;
|
||||||
|
var _require3 = require("./get-tags.js"),
|
||||||
|
isPlaceholder = _require3.isPlaceholder,
|
||||||
|
_getTags = _require3.getTags;
|
||||||
|
var slideNumRegex = /ppt\/slides\/slide([0-9]+).xml/;
|
||||||
|
function getSlideIndex(path) {
|
||||||
|
return parseInt(path.replace(slideNumRegex, "$1"), 10) - 1;
|
||||||
|
}
|
||||||
|
function _getStructuredTags(postParsed) {
|
||||||
|
return postParsed.filter(isPlaceholder).map(function (part) {
|
||||||
|
part.subparsed && (part.subparsed = _getStructuredTags(part.subparsed));
|
||||||
|
if (part.attrParsed) {
|
||||||
|
part.subparsed = [];
|
||||||
|
if (part.attrParsed) {
|
||||||
|
part.subparsed = [];
|
||||||
|
for (var key in part.attrParsed) {
|
||||||
|
part.subparsed = part.subparsed.concat(part.attrParsed[key]);
|
||||||
|
}
|
||||||
|
return part;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return part;
|
||||||
|
}, {});
|
||||||
|
}
|
||||||
|
var InspectModule = /*#__PURE__*/function () {
|
||||||
|
function InspectModule() {
|
||||||
|
_classCallCheck(this, InspectModule);
|
||||||
|
this.name = "InspectModule";
|
||||||
|
this.inspect = {};
|
||||||
|
this.fullInspected = {};
|
||||||
|
this.filePath = null;
|
||||||
|
}
|
||||||
|
return _createClass(InspectModule, [{
|
||||||
|
key: "clone",
|
||||||
|
value: function clone() {
|
||||||
|
return new InspectModule();
|
||||||
|
}
|
||||||
|
}, {
|
||||||
|
key: "optionsTransformer",
|
||||||
|
value: function optionsTransformer(options, docxtemplater) {
|
||||||
|
this.fileTypeConfig = docxtemplater.fileTypeConfig;
|
||||||
|
this.zip = docxtemplater.zip;
|
||||||
|
this.targets = docxtemplater.targets;
|
||||||
|
this.fileType = docxtemplater.fileType;
|
||||||
|
this.docxtemplater = docxtemplater;
|
||||||
|
return options;
|
||||||
|
}
|
||||||
|
}, {
|
||||||
|
key: "set",
|
||||||
|
value: function set(obj) {
|
||||||
|
if (obj.data) {
|
||||||
|
this.inspect.tags = obj.data;
|
||||||
|
}
|
||||||
|
if (obj.pptxCustomMap) {
|
||||||
|
this.pptxCustomMap = obj.pptxCustomMap;
|
||||||
|
}
|
||||||
|
if (obj.pptxSectionMap) {
|
||||||
|
this.pptxSectionMap = obj.pptxSectionMap;
|
||||||
|
}
|
||||||
|
if (obj.inspect) {
|
||||||
|
if (obj.inspect.filePath) {
|
||||||
|
this.filePath = obj.inspect.filePath;
|
||||||
|
this.inspect = this.fullInspected[this.filePath] || {};
|
||||||
|
}
|
||||||
|
if (obj.inspect.content) {
|
||||||
|
this.inspect.content = obj.inspect.content;
|
||||||
|
} else if (obj.inspect.postparsed) {
|
||||||
|
this.inspect.postparsed = cloneDeep(obj.inspect.postparsed);
|
||||||
|
} else if (obj.inspect.parsed) {
|
||||||
|
this.inspect.parsed = cloneDeep(obj.inspect.parsed);
|
||||||
|
} else if (obj.inspect.lexed) {
|
||||||
|
this.inspect.lexed = cloneDeep(obj.inspect.lexed);
|
||||||
|
} else if (obj.inspect.xmllexed) {
|
||||||
|
this.inspect.xmllexed = cloneDeep(obj.inspect.xmllexed);
|
||||||
|
}
|
||||||
|
if (obj.inspect.resolved) {
|
||||||
|
this.inspect.resolved = obj.inspect.resolved;
|
||||||
|
}
|
||||||
|
this.fullInspected[this.filePath] = this.inspect;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}, {
|
||||||
|
key: "nullGetter",
|
||||||
|
value: function nullGetter(part, scopeManager, xt) {
|
||||||
|
var inspected = this.fullInspected[xt.filePath];
|
||||||
|
inspected.nullValues || (inspected.nullValues = {
|
||||||
|
summary: [],
|
||||||
|
detail: []
|
||||||
|
});
|
||||||
|
inspected.nullValues.detail.push({
|
||||||
|
part: part,
|
||||||
|
scopeManager: scopeManager
|
||||||
|
});
|
||||||
|
inspected.nullValues.summary.push(scopeManager.scopePath.concat(part.value));
|
||||||
|
}
|
||||||
|
}, {
|
||||||
|
key: "getInspected",
|
||||||
|
value: function getInspected(file) {
|
||||||
|
var inspected = cloneDeep(this.fullInspected[file].postparsed);
|
||||||
|
var si = getSlideIndex(file);
|
||||||
|
if (si != null && !isNaN(si)) {
|
||||||
|
if (this.pptxCustomMap && this.pptxCustomMap[si]) {
|
||||||
|
var map = this.pptxCustomMap[si];
|
||||||
|
if (map) {
|
||||||
|
inspected = [_objectSpread(_objectSpread({}, map), {}, {
|
||||||
|
type: "placeholder",
|
||||||
|
module: "pro-xml-templating/slides-module-repeat",
|
||||||
|
subparsed: inspected
|
||||||
|
})];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (this.pptxSectionMap) {
|
||||||
|
for (var _i2 = 0, _this$pptxSectionMap2 = this.pptxSectionMap; _i2 < _this$pptxSectionMap2.length; _i2++) {
|
||||||
|
var section = _this$pptxSectionMap2[_i2];
|
||||||
|
if (section.slideIndexes.indexOf(si) !== -1) {
|
||||||
|
inspected = [_objectSpread(_objectSpread({}, section.part), {}, {
|
||||||
|
module: "pro-xml-templating/slides-module-section",
|
||||||
|
subparsed: inspected,
|
||||||
|
slideIndexes: section.slideIndexes,
|
||||||
|
subIds: section.subIds
|
||||||
|
})];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return inspected;
|
||||||
|
}
|
||||||
|
}, {
|
||||||
|
key: "getTags",
|
||||||
|
value: function getTags(file) {
|
||||||
|
file || (file = this.fileTypeConfig.textPath(this.docxtemplater));
|
||||||
|
var inspected = this.getInspected(file);
|
||||||
|
return _getTags(inspected);
|
||||||
|
}
|
||||||
|
}, {
|
||||||
|
key: "getAllTags",
|
||||||
|
value: function getAllTags() {
|
||||||
|
var result = {};
|
||||||
|
for (var _i4 = 0, _Object$keys2 = Object.keys(this.fullInspected); _i4 < _Object$keys2.length; _i4++) {
|
||||||
|
var file = _Object$keys2[_i4];
|
||||||
|
merge(result, this.getTags(file));
|
||||||
|
}
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
}, {
|
||||||
|
key: "getStructuredTags",
|
||||||
|
value: function getStructuredTags(file) {
|
||||||
|
file || (file = this.fileTypeConfig.textPath(this.docxtemplater));
|
||||||
|
var inspected = this.getInspected(file);
|
||||||
|
return _getStructuredTags(inspected);
|
||||||
|
}
|
||||||
|
}, {
|
||||||
|
key: "getAllStructuredTags",
|
||||||
|
value: function getAllStructuredTags() {
|
||||||
|
var result = [];
|
||||||
|
for (var file in this.fullInspected) {
|
||||||
|
pushArray(result, this.getStructuredTags(file));
|
||||||
|
}
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
}, {
|
||||||
|
key: "getFileType",
|
||||||
|
value: function getFileType() {
|
||||||
|
return this.fileType;
|
||||||
|
}
|
||||||
|
}, {
|
||||||
|
key: "getTemplatedFiles",
|
||||||
|
value: function getTemplatedFiles() {
|
||||||
|
return this.docxtemplater.templatedFiles;
|
||||||
|
}
|
||||||
|
}]);
|
||||||
|
}();
|
||||||
|
module.exports = function () {
|
||||||
|
return new InspectModule();
|
||||||
|
};
|
||||||
113 tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/js/join-uncorrupt.js generated vendored Normal file
@@ -0,0 +1,113 @@
|
|||||||
|
"use strict";
|
||||||
|
|
||||||
|
var _require = require("./doc-utils.js"),
|
||||||
|
startsWith = _require.startsWith,
|
||||||
|
endsWith = _require.endsWith,
|
||||||
|
isStarting = _require.isStarting,
|
||||||
|
isEnding = _require.isEnding,
|
||||||
|
isWhiteSpace = _require.isWhiteSpace;
|
||||||
|
var filetypes = require("./filetypes.js");
|
||||||
|
function addEmptyParagraphAfterTable(parts) {
|
||||||
|
var lastNonEmpty = "";
|
||||||
|
for (var i = 0, len = parts.length; i < len; i++) {
|
||||||
|
var p = parts[i];
|
||||||
|
if (isWhiteSpace(p) || startsWith(p, "<w:bookmarkEnd")) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
if (endsWith(lastNonEmpty, "</w:tbl>")) {
|
||||||
|
if (!startsWith(p, "<w:p") && !startsWith(p, "<w:tbl") && !startsWith(p, "<w:sectPr") &&
|
||||||
|
// Tested by #regression-paragraph-after-table-header-footer
|
||||||
|
!startsWith(p, "</w:ftr>") && !startsWith(p, "</w:hdr>")) {
|
||||||
|
p = "<w:p/>".concat(p);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
lastNonEmpty = p;
|
||||||
|
parts[i] = p;
|
||||||
|
}
|
||||||
|
return parts;
|
||||||
|
}
|
||||||
|
|
||||||
|
// eslint-disable-next-line complexity
|
||||||
|
function joinUncorrupt(parts, options) {
|
||||||
|
var contains = options.fileTypeConfig.tagShouldContain || [];
|
||||||
|
/*
|
||||||
|
* Before doing this "uncorruption" method here, this was done with the
|
||||||
|
* `part.emptyValue` trick, however, there were some corruptions that were
|
||||||
|
* not handled, for example with a template like this :
|
||||||
|
*
|
||||||
|
* ------------------------------------------------
|
||||||
|
* | {-w:p falsy}My para{/falsy} | |
|
||||||
|
* | {-w:p falsy}My para{/falsy} | |
|
||||||
|
* ------------------------------------------------
|
||||||
|
*/
|
||||||
|
var collecting = "";
|
||||||
|
var currentlyCollecting = -1;
|
||||||
|
if (filetypes.docx.indexOf(options.contentType) !== -1) {
|
||||||
|
parts = addEmptyParagraphAfterTable(parts);
|
||||||
|
}
|
||||||
|
var startIndex = -1;
|
||||||
|
for (var j = 0, len2 = contains.length; j < len2; j++) {
|
||||||
|
var _contains$j = contains[j],
|
||||||
|
tag = _contains$j.tag,
|
||||||
|
shouldContain = _contains$j.shouldContain,
|
||||||
|
value = _contains$j.value,
|
||||||
|
drop = _contains$j.drop,
|
||||||
|
dropParent = _contains$j.dropParent;
|
||||||
|
for (var i = 0, len = parts.length; i < len; i++) {
|
||||||
|
var part = parts[i];
|
||||||
|
if (currentlyCollecting === j) {
|
||||||
|
if (isEnding(part, tag)) {
|
||||||
|
currentlyCollecting = -1;
|
||||||
|
if (dropParent) {
|
||||||
|
var start = -1;
|
||||||
|
for (var k = startIndex; k > 0; k--) {
|
||||||
|
if (isStarting(parts[k], dropParent)) {
|
||||||
|
start = k;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for (var _k = start; _k <= parts.length; _k++) {
|
||||||
|
if (isEnding(parts[_k], dropParent)) {
|
||||||
|
parts[_k] = "";
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
parts[_k] = "";
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
for (var _k2 = startIndex; _k2 <= i; _k2++) {
|
||||||
|
parts[_k2] = "";
|
||||||
|
}
|
||||||
|
if (!drop) {
|
||||||
|
parts[i] = collecting + value + part;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
collecting += part;
|
||||||
|
for (var _k3 = 0, len3 = shouldContain.length; _k3 < len3; _k3++) {
|
||||||
|
var sc = shouldContain[_k3];
|
||||||
|
if (isStarting(part, sc)) {
|
||||||
|
currentlyCollecting = -1;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (currentlyCollecting === -1 && isStarting(part, tag) &&
|
||||||
|
/*
|
||||||
|
* To verify that the part doesn't have multiple tags,
|
||||||
|
* such as <w:tc><w:p>
|
||||||
|
*/
|
||||||
|
part.substr(1).indexOf("<") === -1) {
|
||||||
|
// self-closing tag such as <w:t/>
|
||||||
|
if (part[part.length - 2] === "/") {
|
||||||
|
parts[i] = "";
|
||||||
|
} else {
|
||||||
|
startIndex = i;
|
||||||
|
currentlyCollecting = j;
|
||||||
|
collecting = part;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return parts;
|
||||||
|
}
|
||||||
|
module.exports = joinUncorrupt;
|
||||||
476 tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/js/lexer.js generated vendored Normal file
@@ -0,0 +1,476 @@
|
|||||||
|
"use strict";
|
||||||
|
|
||||||
|
function _typeof(o) { "@babel/helpers - typeof"; return _typeof = "function" == typeof Symbol && "symbol" == typeof Symbol.iterator ? function (o) { return typeof o; } : function (o) { return o && "function" == typeof Symbol && o.constructor === Symbol && o !== Symbol.prototype ? "symbol" : typeof o; }, _typeof(o); }
|
||||||
|
function _slicedToArray(r, e) { return _arrayWithHoles(r) || _iterableToArrayLimit(r, e) || _unsupportedIterableToArray(r, e) || _nonIterableRest(); }
|
||||||
|
function _nonIterableRest() { throw new TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); }
|
||||||
|
function _unsupportedIterableToArray(r, a) { if (r) { if ("string" == typeof r) return _arrayLikeToArray(r, a); var t = {}.toString.call(r).slice(8, -1); return "Object" === t && r.constructor && (t = r.constructor.name), "Map" === t || "Set" === t ? Array.from(r) : "Arguments" === t || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(t) ? _arrayLikeToArray(r, a) : void 0; } }
|
||||||
|
function _arrayLikeToArray(r, a) { (null == a || a > r.length) && (a = r.length); for (var e = 0, n = Array(a); e < a; e++) n[e] = r[e]; return n; }
|
||||||
|
function _iterableToArrayLimit(r, l) { var t = null == r ? null : "undefined" != typeof Symbol && r[Symbol.iterator] || r["@@iterator"]; if (null != t) { var e, n, i, u, a = [], f = !0, o = !1; try { if (i = (t = t.call(r)).next, 0 === l) { if (Object(t) !== t) return; f = !1; } else for (; !(f = (e = i.call(t)).done) && (a.push(e.value), a.length !== l); f = !0); } catch (r) { o = !0, n = r; } finally { try { if (!f && null != t["return"] && (u = t["return"](), Object(u) !== u)) return; } finally { if (o) throw n; } } return a; } }
|
||||||
|
function _arrayWithHoles(r) { if (Array.isArray(r)) return r; }
|
||||||
|
function ownKeys(e, r) { var t = Object.keys(e); if (Object.getOwnPropertySymbols) { var o = Object.getOwnPropertySymbols(e); r && (o = o.filter(function (r) { return Object.getOwnPropertyDescriptor(e, r).enumerable; })), t.push.apply(t, o); } return t; }
|
||||||
|
function _objectSpread(e) { for (var r = 1; r < arguments.length; r++) { var t = null != arguments[r] ? arguments[r] : {}; r % 2 ? ownKeys(Object(t), !0).forEach(function (r) { _defineProperty(e, r, t[r]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(e, Object.getOwnPropertyDescriptors(t)) : ownKeys(Object(t)).forEach(function (r) { Object.defineProperty(e, r, Object.getOwnPropertyDescriptor(t, r)); }); } return e; }
|
||||||
|
function _defineProperty(e, r, t) { return (r = _toPropertyKey(r)) in e ? Object.defineProperty(e, r, { value: t, enumerable: !0, configurable: !0, writable: !0 }) : e[r] = t, e; }
|
||||||
|
function _toPropertyKey(t) { var i = _toPrimitive(t, "string"); return "symbol" == _typeof(i) ? i : i + ""; }
|
||||||
|
function _toPrimitive(t, r) { if ("object" != _typeof(t) || !t) return t; var e = t[Symbol.toPrimitive]; if (void 0 !== e) { var i = e.call(t, r || "default"); if ("object" != _typeof(i)) return i; throw new TypeError("@@toPrimitive must return a primitive value."); } return ("string" === r ? String : Number)(t); }
|
||||||
|
var _require = require("./errors.js"),
|
||||||
|
getUnclosedTagException = _require.getUnclosedTagException,
|
||||||
|
getUnopenedTagException = _require.getUnopenedTagException,
|
||||||
|
getDuplicateOpenTagException = _require.getDuplicateOpenTagException,
|
||||||
|
getDuplicateCloseTagException = _require.getDuplicateCloseTagException,
|
||||||
|
throwMalformedXml = _require.throwMalformedXml,
|
||||||
|
throwXmlInvalid = _require.throwXmlInvalid,
|
||||||
|
XTTemplateError = _require.XTTemplateError;
|
||||||
|
var _require2 = require("./doc-utils.js"),
|
||||||
|
isTextStart = _require2.isTextStart,
|
||||||
|
isTextEnd = _require2.isTextEnd,
|
||||||
|
wordToUtf8 = _require2.wordToUtf8,
|
||||||
|
pushArray = _require2.pushArray;
|
||||||
|
var DELIMITER_NONE = 0,
|
||||||
|
DELIMITER_EQUAL = 1,
|
||||||
|
DELIMITER_START = 2,
|
||||||
|
DELIMITER_END = 3;
|
||||||
|
function inRange(range, match) {
|
||||||
|
return range[0] <= match.offset && match.offset < range[1];
|
||||||
|
}
|
||||||
|
function updateInTextTag(part, inTextTag) {
|
||||||
|
if (isTextStart(part)) {
|
||||||
|
if (inTextTag) {
|
||||||
|
throwMalformedXml();
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
if (isTextEnd(part)) {
|
||||||
|
if (!inTextTag) {
|
||||||
|
throwMalformedXml();
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
return inTextTag;
|
||||||
|
}
|
||||||
|
function getTag(tag) {
|
||||||
|
var position = "";
|
||||||
|
var start = 1;
|
||||||
|
var end = tag.indexOf(" ");
|
||||||
|
if (tag[tag.length - 2] === "/") {
|
||||||
|
position = "selfclosing";
|
||||||
|
if (end === -1) {
|
||||||
|
end = tag.length - 2;
|
||||||
|
}
|
||||||
|
} else if (tag[1] === "/") {
|
||||||
|
start = 2;
|
||||||
|
position = "end";
|
||||||
|
if (end === -1) {
|
||||||
|
end = tag.length - 1;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
position = "start";
|
||||||
|
if (end === -1) {
|
||||||
|
end = tag.length - 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return {
|
||||||
|
tag: tag.slice(start, end),
|
||||||
|
position: position
|
||||||
|
};
|
||||||
|
}
|
||||||
|
function tagMatcher(content, textMatchArray, othersMatchArray) {
|
||||||
|
var cursor = 0;
|
||||||
|
var contentLength = content.length;
|
||||||
|
var allMatches = {};
|
||||||
|
for (var _i2 = 0; _i2 < textMatchArray.length; _i2++) {
|
||||||
|
var m = textMatchArray[_i2];
|
||||||
|
allMatches[m] = true;
|
||||||
|
}
|
||||||
|
for (var _i4 = 0; _i4 < othersMatchArray.length; _i4++) {
|
||||||
|
var _m = othersMatchArray[_i4];
|
||||||
|
allMatches[_m] = false;
|
||||||
|
}
|
||||||
|
var totalMatches = [];
|
||||||
|
while (cursor < contentLength) {
|
||||||
|
cursor = content.indexOf("<", cursor);
|
||||||
|
if (cursor === -1) {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
var offset = cursor;
|
||||||
|
var nextOpening = content.indexOf("<", cursor + 1);
|
||||||
|
cursor = content.indexOf(">", cursor);
|
||||||
|
if (cursor === -1 || nextOpening !== -1 && cursor > nextOpening) {
|
||||||
|
throwXmlInvalid(content, offset);
|
||||||
|
}
|
||||||
|
var tagText = content.slice(offset, cursor + 1);
|
||||||
|
var _getTag = getTag(tagText),
|
||||||
|
tag = _getTag.tag,
|
||||||
|
position = _getTag.position;
|
||||||
|
var text = allMatches[tag];
|
||||||
|
if (text == null) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
totalMatches.push({
|
||||||
|
type: "tag",
|
||||||
|
position: position,
|
||||||
|
text: text,
|
||||||
|
offset: offset,
|
||||||
|
value: tagText,
|
||||||
|
tag: tag
|
||||||
|
});
|
||||||
|
}
|
||||||
|
return totalMatches;
|
||||||
|
}
|
||||||
|
function getDelimiterErrors(delimiterMatches, fullText, syntaxOptions) {
|
||||||
|
var errors = [];
|
||||||
|
var inDelimiter = false;
|
||||||
|
var lastDelimiterMatch = {
|
||||||
|
offset: 0
|
||||||
|
};
|
||||||
|
var xtag;
|
||||||
|
var delimiterWithErrors = delimiterMatches.reduce(function (delimiterAcc, currDelimiterMatch) {
|
||||||
|
var position = currDelimiterMatch.position;
|
||||||
|
var delimiterOffset = currDelimiterMatch.offset;
|
||||||
|
var lastDelimiterOffset = lastDelimiterMatch.offset;
|
||||||
|
var lastDelimiterLength = lastDelimiterMatch.length;
|
||||||
|
xtag = fullText.substr(lastDelimiterOffset, delimiterOffset - lastDelimiterOffset);
|
||||||
|
if (inDelimiter && position === "start") {
|
||||||
|
if (lastDelimiterOffset + lastDelimiterLength === delimiterOffset) {
|
||||||
|
xtag = fullText.substr(lastDelimiterOffset, delimiterOffset - lastDelimiterOffset + lastDelimiterLength + 4);
|
||||||
|
if (!syntaxOptions.allowUnclosedTag) {
|
||||||
|
errors.push(getDuplicateOpenTagException({
|
||||||
|
xtag: xtag,
|
||||||
|
offset: lastDelimiterOffset
|
||||||
|
}));
|
||||||
|
lastDelimiterMatch = currDelimiterMatch;
|
||||||
|
delimiterAcc.push(_objectSpread(_objectSpread({}, currDelimiterMatch), {}, {
|
||||||
|
error: true
|
||||||
|
}));
|
||||||
|
return delimiterAcc;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (!syntaxOptions.allowUnclosedTag) {
|
||||||
|
errors.push(getUnclosedTagException({
|
||||||
|
xtag: wordToUtf8(xtag),
|
||||||
|
offset: lastDelimiterOffset
|
||||||
|
}));
|
||||||
|
lastDelimiterMatch = currDelimiterMatch;
|
||||||
|
delimiterAcc.push(_objectSpread(_objectSpread({}, currDelimiterMatch), {}, {
|
||||||
|
error: true
|
||||||
|
}));
|
||||||
|
return delimiterAcc;
|
||||||
|
}
|
||||||
|
delimiterAcc.pop();
|
||||||
|
}
|
||||||
|
if (!inDelimiter && position === "end") {
|
||||||
|
if (syntaxOptions.allowUnopenedTag) {
|
||||||
|
return delimiterAcc;
|
||||||
|
}
|
||||||
|
if (lastDelimiterOffset + lastDelimiterLength === delimiterOffset) {
|
||||||
|
xtag = fullText.substr(lastDelimiterOffset - 4, delimiterOffset - lastDelimiterOffset + lastDelimiterLength + 4);
|
||||||
|
errors.push(getDuplicateCloseTagException({
|
||||||
|
xtag: xtag,
|
||||||
|
offset: lastDelimiterOffset
|
||||||
|
}));
|
||||||
|
lastDelimiterMatch = currDelimiterMatch;
|
||||||
|
delimiterAcc.push(_objectSpread(_objectSpread({}, currDelimiterMatch), {}, {
|
||||||
|
error: true
|
||||||
|
}));
|
||||||
|
return delimiterAcc;
|
||||||
|
}
|
||||||
|
errors.push(getUnopenedTagException({
|
||||||
|
xtag: xtag,
|
||||||
|
offset: delimiterOffset
|
||||||
|
}));
|
||||||
|
lastDelimiterMatch = currDelimiterMatch;
|
||||||
|
delimiterAcc.push(_objectSpread(_objectSpread({}, currDelimiterMatch), {}, {
|
||||||
|
error: true
|
||||||
|
}));
|
||||||
|
return delimiterAcc;
|
||||||
|
}
|
||||||
|
inDelimiter = position === "start";
|
||||||
|
lastDelimiterMatch = currDelimiterMatch;
|
||||||
|
delimiterAcc.push(currDelimiterMatch);
|
||||||
|
return delimiterAcc;
|
||||||
|
}, []);
|
||||||
|
if (inDelimiter) {
|
||||||
|
var lastDelimiterOffset = lastDelimiterMatch.offset;
|
||||||
|
xtag = fullText.substr(lastDelimiterOffset, fullText.length - lastDelimiterOffset);
|
||||||
|
if (!syntaxOptions.allowUnclosedTag) {
|
||||||
|
errors.push(getUnclosedTagException({
|
||||||
|
xtag: wordToUtf8(xtag),
|
||||||
|
offset: lastDelimiterOffset
|
||||||
|
}));
|
||||||
|
} else {
|
||||||
|
delimiterWithErrors.pop();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return {
|
||||||
|
delimiterWithErrors: delimiterWithErrors,
|
||||||
|
errors: errors
|
||||||
|
};
|
||||||
|
}
|
||||||
|
function compareOffsets(startOffset, endOffset) {
|
||||||
|
if (startOffset === -1 && endOffset === -1) {
|
||||||
|
return DELIMITER_NONE;
|
||||||
|
}
|
||||||
|
if (startOffset === endOffset) {
|
||||||
|
return DELIMITER_EQUAL;
|
||||||
|
}
|
||||||
|
if (startOffset === -1 || endOffset === -1) {
|
||||||
|
return endOffset < startOffset ? DELIMITER_START : DELIMITER_END;
|
||||||
|
}
|
||||||
|
return startOffset < endOffset ? DELIMITER_START : DELIMITER_END;
|
||||||
|
}
|
||||||
|
function splitDelimiters(inside) {
|
||||||
|
var newDelimiters = inside.split(" ");
|
||||||
|
if (newDelimiters.length !== 2) {
|
||||||
|
var err = new XTTemplateError("New Delimiters cannot be parsed");
|
||||||
|
err.properties = {
|
||||||
|
id: "change_delimiters_invalid",
|
||||||
|
explanation: "Cannot parser delimiters"
|
||||||
|
};
|
||||||
|
throw err;
|
||||||
|
}
|
||||||
|
var _newDelimiters = _slicedToArray(newDelimiters, 2),
|
||||||
|
start = _newDelimiters[0],
|
||||||
|
end = _newDelimiters[1];
|
||||||
|
if (start.length === 0 || end.length === 0) {
|
||||||
|
var _err = new XTTemplateError("New Delimiters cannot be parsed");
|
||||||
|
_err.properties = {
|
||||||
|
id: "change_delimiters_invalid",
|
||||||
|
explanation: "Cannot parser delimiters"
|
||||||
|
};
|
||||||
|
throw _err;
|
||||||
|
}
|
||||||
|
return [start, end];
|
||||||
|
}
|
||||||
|
function getAllDelimiterIndexes(fullText, delimiters, syntaxOptions) {
|
||||||
|
var indexes = [];
|
||||||
|
var start = delimiters.start,
|
||||||
|
end = delimiters.end;
|
||||||
|
var offset = -1;
|
||||||
|
var insideTag = false;
|
||||||
|
if (start == null && end == null) {
|
||||||
|
// Special case of delimiter set to null/null, no templating is done
|
||||||
|
return [];
|
||||||
|
}
|
||||||
|
while (true) {
|
||||||
|
var startOffset = fullText.indexOf(start, offset + 1);
|
||||||
|
var endOffset = fullText.indexOf(end, offset + 1);
|
||||||
|
var position = null;
|
||||||
|
var len = void 0;
|
||||||
|
var compareResult = compareOffsets(startOffset, endOffset);
|
||||||
|
if (compareResult === DELIMITER_EQUAL) {
|
||||||
|
compareResult = insideTag ? DELIMITER_END : DELIMITER_START;
|
||||||
|
}
|
||||||
|
switch (compareResult) {
|
||||||
|
case DELIMITER_NONE:
|
||||||
|
return indexes;
|
||||||
|
case DELIMITER_END:
|
||||||
|
insideTag = false;
|
||||||
|
offset = endOffset;
|
||||||
|
position = "end";
|
||||||
|
len = end.length;
|
||||||
|
break;
|
||||||
|
case DELIMITER_START:
|
||||||
|
insideTag = true;
|
||||||
|
offset = startOffset;
|
||||||
|
position = "start";
|
||||||
|
len = start.length;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
/*
|
||||||
|
* If tag starts with =, such as {=[ ]=}
|
||||||
|
* then the delimiters will change right after that tag.
|
||||||
|
*
|
||||||
|
* For example, with the following template :
|
||||||
|
*
|
||||||
|
* Hello {foo}, {=[ ]=}what's up with [name] ?
|
||||||
|
*
|
||||||
|
* The "foo" tag is a normal tag, the "=[ ]=" is a tag to change the
|
||||||
|
* delimiters to "[" and "]", and the last "name" is a tag with the new
|
||||||
|
* delimiters
|
||||||
|
*/
|
||||||
|
if (syntaxOptions.changeDelimiterPrefix && compareResult === DELIMITER_START && fullText[offset + start.length] === syntaxOptions.changeDelimiterPrefix) {
|
||||||
|
indexes.push({
|
||||||
|
offset: startOffset,
|
||||||
|
position: "start",
|
||||||
|
length: start.length,
|
||||||
|
changedelimiter: true
|
||||||
|
});
|
||||||
|
var nextEqual = fullText.indexOf(syntaxOptions.changeDelimiterPrefix, offset + start.length + 1);
|
||||||
|
var nextEndOffset = fullText.indexOf(end, nextEqual + 1);
|
||||||
|
indexes.push({
|
||||||
|
offset: nextEndOffset,
|
||||||
|
position: "end",
|
||||||
|
length: end.length,
|
||||||
|
changedelimiter: true
|
||||||
|
});
|
||||||
|
var _insideTag = fullText.substr(offset + start.length + 1, nextEqual - offset - start.length - 1);
|
||||||
|
var _splitDelimiters = splitDelimiters(_insideTag);
|
||||||
|
var _splitDelimiters2 = _slicedToArray(_splitDelimiters, 2);
|
||||||
|
start = _splitDelimiters2[0];
|
||||||
|
end = _splitDelimiters2[1];
|
||||||
|
offset = nextEndOffset;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
indexes.push({
|
||||||
|
offset: offset,
|
||||||
|
position: position,
|
||||||
|
length: len
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
[Vendored, Babel-transpiled source continues here (the remainder of what appears to be docxtemplater's lexer): `parseDelimiters(innerContentParts, delimiters, syntaxOptions)` joins the content parts, finds the delimiter matches, splits each part into `content` and `delimiter` sub-parts and returns `{ parsed, errors }`; the helpers `isInsideContent`, `getContentParts` and `decodeContentParts` mark content parts as inside or outside text tags and normalize `>` characters for non-text file types; the exported object provides `parseDelimiters`, `parse(xmllexed, delimiters, syntax, fileType)` (which runs the content parts through the delimiter parser, assigns `lIndex` values and returns `{ errors, lexed }`) and `xmlparse(content, xmltags)` (which splits raw XML into content parts and tag matches via `tagMatcher`).]
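For orientation, a hedged usage sketch of the `xmlparse` entry point summarized above, mirroring the call made later in this diff by fix-doc-pr-corruption.js; the require path and the XML string are assumptions for illustration:

```javascript
// Assumed path of the vendored module summarized above.
var lexer = require("docxtemplater/js/lexer.js");

// Split raw XML into plain "content" parts and tag parts for the listed tag names.
var parts = lexer.xmlparse("<w:p><w:t>Hello</w:t></w:p>", {
	text: ["w:t"],
	other: ["w:p"],
});
// Each element is either { type: "content", value: "..." } or a tag match produced
// by tagMatcher (whose offset property is deleted before it is pushed).
```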
36  tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/js/memory-test.js  (generated, vendored, Normal file)
@@ -0,0 +1,36 @@
"use strict";

var fs = require("fs");
var PizZip = require("pizzip");
var Docxtemplater = require("./docxtemplater.js");
var zip = new PizZip(fs.readFileSync("examples/memory-stress.docx"));
var doc = new Docxtemplater(zip, {
	paragraphLoop: true
});
var a = [];
for (var i = 0, len = 500; i < len; i++) {
	var b = [];
	for (var j = 0, len2 = 500; j < len2; j++) {
		b.push({
			title: i + j,
			c: [{
				content: "Hi"
			}, {
				content: "Ho"
			}]
		});
	}
	a.push({
		b: b
	});
}
doc.render({
	a: a
});
var buf = doc.toBuffer();
var minSize = 500;
if (buf.length < minSize * 1000 * 1000) {
	// Build the message by concatenation so minSize is actually interpolated.
	throw new Error("The output document should be at least " + minSize + " MB");
}
// eslint-disable-next-line no-console
console.log("memory-test buffer length : ", buf.length);
34  tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/js/merge-sort.js  (generated, vendored, Normal file)
@@ -0,0 +1,34 @@
"use strict";

function getMinFromArrays(arrays, state) {
	var minIndex = -1;
	for (var i = 0, l = arrays.length; i < l; i++) {
		if (state[i] >= arrays[i].length) {
			continue;
		}
		if (minIndex === -1 || arrays[i][state[i]].offset < arrays[minIndex][state[minIndex]].offset) {
			minIndex = i;
		}
	}
	return minIndex;
}
module.exports = function (arrays) {
	var totalLength = 0;
	for (var _i2 = 0, _arrays2 = arrays; _i2 < _arrays2.length; _i2++) {
		var array = _arrays2[_i2];
		totalLength += array.length;
	}
	arrays = arrays.filter(function (array) {
		return array.length > 0;
	});
	var resultArray = new Array(totalLength);
	var state = arrays.map(function () {
		return 0;
	});
	for (var i = 0; i < totalLength; i++) {
		var arrayIndex = getMinFromArrays(arrays, state);
		resultArray[i] = arrays[arrayIndex][state[arrayIndex]];
		state[arrayIndex]++;
	}
	return resultArray;
};
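For orientation, a minimal usage sketch of this vendored helper; it assumes, as the library's callers do, that every input array is already sorted by its items' `offset` property:

```javascript
// Illustrative only; not part of this commit.
var mergeSort = require("docxtemplater/js/merge-sort.js");

var merged = mergeSort([
	[{ offset: 1 }, { offset: 5 }],
	[{ offset: 3 }, { offset: 9 }],
]);
// merged: [{ offset: 1 }, { offset: 3 }, { offset: 5 }, { offset: 9 }]
```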
284  tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/js/minizod.js  (generated, vendored, Normal file)
@@ -0,0 +1,284 @@
[Vendored, Babel-transpiled source (284 added lines): `minizod.js` defines `MiniZod`, a small runtime validator. `MiniZod.createSchema(validateFn)` returns a schema with `validate`, `optional()` and `nullable()`; the static factories `string()`, `number()`, `boolean()`, `date()`, `function()`, `array(itemSchema)`, `any()`, `isRegex()`, `union(schemas)`, `object(shape)` (whose result also exposes `.strict()`) and `record(valueSchema)` build schemas whose `validate(value)` returns `{ success: true, value }` on success or `{ success: false, error }` with a descriptive message on failure. The class is exported with `module.exports = MiniZod`.]
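A small, hypothetical usage sketch of this internal validator (the option names being validated are invented for illustration):

```javascript
var MiniZod = require("docxtemplater/js/minizod.js");

var optionsSchema = MiniZod.object({
	delimiters: MiniZod.object({
		start: MiniZod.string(),
		end: MiniZod.string(),
	}),
	paragraphLoop: MiniZod.boolean().optional(),
});

optionsSchema.validate({ delimiters: { start: "{", end: "}" } });
// returns { success: true, value: { delimiters: { start: "{", end: "}" } } }

optionsSchema.validate({ delimiters: { start: 1, end: "}" } });
// returns { success: false, error: "Expected string, received number at start at delimiters" }
```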
44  tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/js/module-wrapper.js  (generated, vendored, Normal file)
@@ -0,0 +1,44 @@
"use strict";

var _require = require("./errors.js"),
	XTInternalError = _require.XTInternalError;
function emptyFun() {}
function identity(i) {
	return i;
}
module.exports = function (module) {
	var defaults = {
		set: emptyFun,
		matchers: function matchers() {
			return [];
		},
		parse: emptyFun,
		render: emptyFun,
		getTraits: emptyFun,
		getFileType: emptyFun,
		nullGetter: emptyFun,
		optionsTransformer: identity,
		postrender: identity,
		errorsTransformer: identity,
		getRenderedMap: identity,
		preparse: identity,
		postparse: identity,
		on: emptyFun,
		resolve: emptyFun,
		preResolve: emptyFun
	};
	if (Object.keys(defaults).every(function (key) {
		return !module[key];
	})) {
		var err = new XTInternalError("This module cannot be wrapped, because it doesn't define any of the necessary functions");
		err.properties = {
			id: "module_cannot_be_wrapped",
			explanation: "This module cannot be wrapped, because it doesn't define any of the necessary functions"
		};
		throw err;
	}
	for (var key in defaults) {
		module[key] || (module[key] = defaults[key]);
	}
	return module;
};
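A hedged sketch of how this wrapper is applied to a module object; `MyNoopModule` is invented for illustration:

```javascript
var wrapper = require("docxtemplater/js/module-wrapper.js");

// A module only needs to define at least one known hook; the wrapper fills the
// remaining hooks with no-op or identity defaults (and throws if none are defined).
var wrapped = wrapper({
	name: "MyNoopModule",
	render: function (part) {
		return null; // let other modules handle the part
	},
});

var usesDefaults = wrapped.postparse === wrapped.preparse; // true, both fall back to the identity default
```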
74  tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/js/modules/common.js  (generated, vendored, Normal file)
@@ -0,0 +1,74 @@
[Vendored, Babel-transpiled source (74 added lines): `modules/common.js` defines the internal `Common` module. Its `getFileType({ doc })` hook pushes the shared content types (settings, core, app, custom properties, diagram data/drawing) onto `doc.targets`, then walks the docx/pptx/xlsx content types from `filetypes.js` and the relationship types in `doc.relsTypes` to detect the package type, records `doc.textTarget` for the main document part, and returns `"docx"`, `"pptx"` or `"xlsx"` (skipping target collection for xlsx). The module is exported as a factory returning `wrapper(new Common())`.]
271  tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/js/modules/expand-pair-trait.js  (generated, vendored, Normal file)
@@ -0,0 +1,271 @@
[Vendored, Babel-transpiled source (271 added lines): `modules/expand-pair-trait.js` implements the `expandPair` trait. `transformer` and `getPairs` match loop opening and closing tags into pairs, reporting unmatched-loop, closing-tag-does-not-match-opening-tag and unbalanced-loop errors; `optionsTransformer` adds the paragraph-loop expand tags when `paragraphLoop` is enabled; `postparse` merge-sorts the traits, expands each pair with `getExpandToDefault`/`getLeft`/`getRight` (unless `expandTo` is unset or the file type is `text`), and folds everything between a pair into the opening part's `subparsed` (recursively postparsed), setting `endLindex` and dropping `location`/`expandTo`. Exported as a factory returning `wrapper(new ExpandPairTrait())`.]
98  tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/js/modules/fix-doc-pr-corruption.js  (generated, vendored, Normal file)
@@ -0,0 +1,98 @@
[Vendored, Babel-transpiled source (98 added lines): `modules/fix-doc-pr-corruption.js` defines `FixDocPRCorruptionModule`, a docx-only module. `set` stores the `Lexer`, `zip` and `xmlDocuments` passed by docxtemplater; on the `"syncing-zip"` event it walks every `.xml` entry of the zip (skipping entries whose content type is not a docx one) and rewrites the `id` attribute of each `wp:docPr` element with a fresh incrementing value, either through the parsed XML document or by re-lexing the file with `Lexer.xmlparse` and `setSingleAttribute`, to avoid duplicate-id corruption. `clone()` returns a new instance so the module can be attached to several docxtemplater instances; the file exports a ready-made instance.]
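A hedged sketch of attaching this corruption-fix module; the `modules` constructor option and the template path are assumptions based on the surrounding vendored code, not something this commit asserts:

```javascript
var fs = require("fs");
var PizZip = require("pizzip");
var Docxtemplater = require("docxtemplater");
// The file exports a ready-made module instance.
var fixDocPrCorruption = require("docxtemplater/js/modules/fix-doc-pr-corruption.js");

var zip = new PizZip(fs.readFileSync("template.docx")); // illustrative template path
var doc = new Docxtemplater(zip, {
	paragraphLoop: true,
	modules: [fixDocPrCorruption], // assumed wiring for attaching extra modules
});
doc.render({});
```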
486  tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/js/modules/loop.js  (generated, vendored, Normal file)
@@ -0,0 +1,486 @@
[Vendored, Babel-transpiled source (486 added lines): `modules/loop.js` implements the loop module. `matchers()` registers the `#` (loop start), `^` (inverted loop), `/` (loop end) and `-<tag> value` (dash-loop) prefixes; `getTraits` feeds loop parts to the `expandPair` trait; `preparse` collects the document's `w:sectPr` sections, and `postparse` decides paragraph-loop expansion while recording page breaks, images, continuous/nextPage section types, header/footer reference counts and the last paragraph's section properties. `resolve` and `render` iterate the loop value through the scope manager, rendering the part's `subparsed` once per item, dropping duplicated header/footer references, inserting continuous section types, section-before paragraphs and page breaks where needed, and adjusting `a:ext` height and `a16:rowId` values for pptx tables; an empty loop falls back to the stored section properties or a page break. Exported as a factory returning `wrapper(new LoopModule())`.]
108 tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/js/modules/rawxml.js generated vendored Normal file
@@ -0,0 +1,108 @@
178 tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/js/modules/render.js generated vendored Normal file
@@ -0,0 +1,178 @@
110 tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/js/modules/space-preserve.js generated vendored Normal file
@@ -0,0 +1,110 @@
203 tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/js/parser.js generated vendored Normal file
@@ -0,0 +1,203 @@
114 tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/js/postrender.js generated vendored Normal file
@@ -0,0 +1,114 @@
48 tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/js/prefix-matcher.js generated vendored Normal file
@@ -0,0 +1,48 @@
27 tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/js/proof-state-module.js generated vendored Normal file
@@ -0,0 +1,27 @@
72 tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/js/render.js generated vendored Normal file
@@ -0,0 +1,72 @@
78 tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/js/resolve.js generated vendored Normal file
@@ -0,0 +1,78 @@
229 tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/js/scope-manager.js generated vendored Normal file
@@ -0,0 +1,229 @@
|
|||||||
|
"use strict";
|
||||||
|
|
||||||
|
function _typeof(o) { "@babel/helpers - typeof"; return _typeof = "function" == typeof Symbol && "symbol" == typeof Symbol.iterator ? function (o) { return typeof o; } : function (o) { return o && "function" == typeof Symbol && o.constructor === Symbol && o !== Symbol.prototype ? "symbol" : typeof o; }, _typeof(o); }
|
||||||
|
function _classCallCheck(a, n) { if (!(a instanceof n)) throw new TypeError("Cannot call a class as a function"); }
|
||||||
|
function _defineProperties(e, r) { for (var t = 0; t < r.length; t++) { var o = r[t]; o.enumerable = o.enumerable || !1, o.configurable = !0, "value" in o && (o.writable = !0), Object.defineProperty(e, _toPropertyKey(o.key), o); } }
|
||||||
|
function _createClass(e, r, t) { return r && _defineProperties(e.prototype, r), t && _defineProperties(e, t), Object.defineProperty(e, "prototype", { writable: !1 }), e; }
|
||||||
|
function _toPropertyKey(t) { var i = _toPrimitive(t, "string"); return "symbol" == _typeof(i) ? i : i + ""; }
|
||||||
|
function _toPrimitive(t, r) { if ("object" != _typeof(t) || !t) return t; var e = t[Symbol.toPrimitive]; if (void 0 !== e) { var i = e.call(t, r || "default"); if ("object" != _typeof(i)) return i; throw new TypeError("@@toPrimitive must return a primitive value."); } return ("string" === r ? String : Number)(t); }
|
||||||
|
var _require = require("./errors.js"),
|
||||||
|
getScopeParserExecutionError = _require.getScopeParserExecutionError;
|
||||||
|
var _require2 = require("./utils.js"),
|
||||||
|
last = _require2.last;
|
||||||
|
var _require3 = require("./doc-utils.js"),
|
||||||
|
concatArrays = _require3.concatArrays;
|
||||||
|
function find(list, fn) {
|
||||||
|
var length = list.length >>> 0;
|
||||||
|
var value;
|
||||||
|
for (var i = 0; i < length; i++) {
|
||||||
|
value = list[i];
|
||||||
|
if (fn.call(this, value, i, list)) {
|
||||||
|
return value;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
function _getValue(tag, meta, num) {
|
||||||
|
var _this = this;
|
||||||
|
var scope = this.scopeList[num];
|
||||||
|
if (this.root.finishedResolving) {
|
||||||
|
var w = this.resolved;
|
||||||
|
var _loop = function _loop() {
|
||||||
|
var lIndex = _this.scopeLindex[i];
|
||||||
|
w = find(w, function (r) {
|
||||||
|
return r.lIndex === lIndex;
|
||||||
|
});
|
||||||
|
w = w.value[_this.scopePathItem[i]];
|
||||||
|
};
|
||||||
|
for (var i = this.resolveOffset, len = this.scopePath.length; i < len; i++) {
|
||||||
|
_loop();
|
||||||
|
}
|
||||||
|
return find(w, function (r) {
|
||||||
|
return meta.part.lIndex === r.lIndex;
|
||||||
|
}).value;
|
||||||
|
}
|
||||||
|
// search in the scopes (in reverse order) and keep the first defined value
|
||||||
|
var result;
|
||||||
|
var parser;
|
||||||
|
if (!this.cachedParsers || !meta.part) {
|
||||||
|
parser = this.parser(tag, {
|
||||||
|
tag: meta.part,
|
||||||
|
scopePath: this.scopePath
|
||||||
|
});
|
||||||
|
} else if (this.cachedParsers[meta.part.lIndex]) {
|
||||||
|
parser = this.cachedParsers[meta.part.lIndex];
|
||||||
|
} else {
|
||||||
|
parser = this.cachedParsers[meta.part.lIndex] = this.parser(tag, {
|
||||||
|
tag: meta.part,
|
||||||
|
scopePath: this.scopePath
|
||||||
|
});
|
||||||
|
}
|
||||||
|
try {
|
||||||
|
result = parser.get(scope, this.getContext(meta, num));
|
||||||
|
} catch (error) {
|
||||||
|
throw getScopeParserExecutionError({
|
||||||
|
tag: tag,
|
||||||
|
scope: scope,
|
||||||
|
error: error,
|
||||||
|
offset: meta.part.offset
|
||||||
|
});
|
||||||
|
}
|
||||||
|
if (result == null && num > 0) {
|
||||||
|
return _getValue.call(this, tag, meta, num - 1);
|
||||||
|
}
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
function _getValueAsync(tag, meta, num) {
|
||||||
|
var _this2 = this;
|
||||||
|
var scope = this.scopeList[num];
|
||||||
|
// search in the scopes (in reverse order) and keep the first defined value
|
||||||
|
var parser;
|
||||||
|
if (!this.cachedParsers || !meta.part) {
|
||||||
|
parser = this.parser(tag, {
|
||||||
|
tag: meta.part,
|
||||||
|
scopePath: this.scopePath
|
||||||
|
});
|
||||||
|
} else if (this.cachedParsers[meta.part.lIndex]) {
|
||||||
|
parser = this.cachedParsers[meta.part.lIndex];
|
||||||
|
} else {
|
||||||
|
parser = this.cachedParsers[meta.part.lIndex] = this.parser(tag, {
|
||||||
|
tag: meta.part,
|
||||||
|
scopePath: this.scopePath
|
||||||
|
});
|
||||||
|
}
|
||||||
|
return Promise.resolve().then(function () {
|
||||||
|
return parser.get(scope, _this2.getContext(meta, num));
|
||||||
|
})["catch"](function (error) {
|
||||||
|
throw getScopeParserExecutionError({
|
||||||
|
tag: tag,
|
||||||
|
scope: scope,
|
||||||
|
error: error,
|
||||||
|
offset: meta.part.offset
|
||||||
|
});
|
||||||
|
}).then(function (result) {
|
||||||
|
if (result == null && num > 0) {
|
||||||
|
return _getValueAsync.call(_this2, tag, meta, num - 1);
|
||||||
|
}
|
||||||
|
return result;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
var ScopeManager = /*#__PURE__*/function () {
|
||||||
|
function ScopeManager(options) {
|
||||||
|
_classCallCheck(this, ScopeManager);
|
||||||
|
this.root = options.root || this;
|
||||||
|
this.resolveOffset = options.resolveOffset || 0;
|
||||||
|
this.scopePath = options.scopePath;
|
||||||
|
this.scopePathItem = options.scopePathItem;
|
||||||
|
this.scopePathLength = options.scopePathLength;
|
||||||
|
this.scopeList = options.scopeList;
|
||||||
|
this.scopeType = "";
|
||||||
|
this.scopeTypes = options.scopeTypes;
|
||||||
|
this.scopeLindex = options.scopeLindex;
|
||||||
|
this.parser = options.parser;
|
||||||
|
this.resolved = options.resolved;
|
||||||
|
this.cachedParsers = options.cachedParsers;
|
||||||
|
}
|
||||||
|
return _createClass(ScopeManager, [{
|
||||||
|
key: "loopOver",
|
||||||
|
value: function loopOver(tag, functor, inverted, meta) {
|
||||||
|
return this.loopOverValue(this.getValue(tag, meta), functor, inverted);
|
||||||
|
}
|
||||||
|
}, {
|
||||||
|
key: "functorIfInverted",
|
||||||
|
value: function functorIfInverted(inverted, functor, value, i, length) {
|
||||||
|
if (inverted) {
|
||||||
|
functor(value, i, length);
|
||||||
|
}
|
||||||
|
return inverted;
|
||||||
|
}
|
||||||
|
}, {
|
||||||
|
key: "isValueFalsy",
|
||||||
|
value: function isValueFalsy(value, type) {
|
||||||
|
return value == null || !value || type === "[object Array]" && value.length === 0;
|
||||||
|
}
|
||||||
|
}, {
|
||||||
|
key: "loopOverValue",
|
||||||
|
value: function loopOverValue(value, functor, inverted) {
|
||||||
|
if (this.root.finishedResolving) {
|
||||||
|
inverted = false;
|
||||||
|
}
|
||||||
|
var type = Object.prototype.toString.call(value);
|
||||||
|
if (this.isValueFalsy(value, type)) {
|
||||||
|
this.scopeType = false;
|
||||||
|
return this.functorIfInverted(inverted, functor, last(this.scopeList), 0, 1);
|
||||||
|
}
|
||||||
|
if (type === "[object Array]") {
|
||||||
|
this.scopeType = "array";
|
||||||
|
for (var i = 0; i < value.length; i++) {
|
||||||
|
this.functorIfInverted(!inverted, functor, value[i], i, value.length);
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
if (type === "[object Object]") {
|
||||||
|
this.scopeType = "object";
|
||||||
|
return this.functorIfInverted(!inverted, functor, value, 0, 1);
|
||||||
|
}
|
||||||
|
return this.functorIfInverted(!inverted, functor, last(this.scopeList), 0, 1);
|
||||||
|
}
|
||||||
|
}, {
key: "getValue",
value: function getValue(tag, meta) {
var result = _getValue.call(this, tag, meta, this.scopeList.length - 1);
if (typeof result === "function") {
return result(this.scopeList[this.scopeList.length - 1], this);
}
return result;
}
}, {
key: "getValueAsync",
value: function getValueAsync(tag, meta) {
var _this3 = this;
return _getValueAsync.call(this, tag, meta, this.scopeList.length - 1).then(function (result) {
if (typeof result === "function") {
return result(_this3.scopeList[_this3.scopeList.length - 1], _this3);
}
return result;
});
}
}, {
key: "getContext",
value: function getContext(meta, num) {
return {
num: num,
meta: meta,
scopeList: this.scopeList,
resolved: this.resolved,
scopePath: this.scopePath,
scopeTypes: this.scopeTypes,
scopePathItem: this.scopePathItem,
scopePathLength: this.scopePathLength
};
}
}, {
key: "createSubScopeManager",
value: function createSubScopeManager(scope, tag, i, part, length) {
return new ScopeManager({
root: this.root,
resolveOffset: this.resolveOffset,
resolved: this.resolved,
parser: this.parser,
cachedParsers: this.cachedParsers,
scopeTypes: concatArrays([this.scopeTypes, [this.scopeType]]),
scopeList: concatArrays([this.scopeList, [scope]]),
scopePath: concatArrays([this.scopePath, [tag]]),
scopePathItem: concatArrays([this.scopePathItem, [i]]),
scopePathLength: concatArrays([this.scopePathLength, [length]]),
scopeLindex: concatArrays([this.scopeLindex, [part.lIndex]])
});
}
}]);
}();
module.exports = function (options) {
options.scopePath = [];
options.scopePathItem = [];
options.scopePathLength = [];
options.scopeTypes = [];
options.scopeLindex = [];
options.scopeList = [options.tags];
return new ScopeManager(options);
};
12
tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/js/test-text.js
generated
vendored
Normal file
@@ -0,0 +1,12 @@
"use strict";

var TxtTemplater = require("../text.js");
var doc = new TxtTemplater("Hello {user}, how are you ?");
var result = doc.render({
user: "John"
});
if (result !== "Hello John, how are you ?") {
// eslint-disable-next-line no-console
console.log(result);
throw new Error("TxtTemplater did not work as expected");
}
163
tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/js/tests/assertion-module.js
generated
vendored
Normal file
@@ -0,0 +1,163 @@
"use strict";

function _typeof(o) { "@babel/helpers - typeof"; return _typeof = "function" == typeof Symbol && "symbol" == typeof Symbol.iterator ? function (o) { return typeof o; } : function (o) { return o && "function" == typeof Symbol && o.constructor === Symbol && o !== Symbol.prototype ? "symbol" : typeof o; }, _typeof(o); }
function _classCallCheck(a, n) { if (!(a instanceof n)) throw new TypeError("Cannot call a class as a function"); }
function _defineProperties(e, r) { for (var t = 0; t < r.length; t++) { var o = r[t]; o.enumerable = o.enumerable || !1, o.configurable = !0, "value" in o && (o.writable = !0), Object.defineProperty(e, _toPropertyKey(o.key), o); } }
function _createClass(e, r, t) { return r && _defineProperties(e.prototype, r), t && _defineProperties(e, t), Object.defineProperty(e, "prototype", { writable: !1 }), e; }
function _toPropertyKey(t) { var i = _toPrimitive(t, "string"); return "symbol" == _typeof(i) ? i : i + ""; }
function _toPrimitive(t, r) { if ("object" != _typeof(t) || !t) return t; var e = t[Symbol.toPrimitive]; if (void 0 !== e) { var i = e.call(t, r || "default"); if ("object" != _typeof(i)) return i; throw new TypeError("@@toPrimitive must return a primitive value."); } return ("string" === r ? String : Number)(t); }
function logContext(parsed, i) {
var context = parsed.slice(i - 2, i + 2);
// eslint-disable-next-line no-console
console.log(JSON.stringify({
context: context
}));
}
function isArray(thing) {
return thing instanceof Array;
}
function isObject(thing) {
return thing instanceof Object && !isArray(thing);
}
function isString(thing) {
return typeof thing === "string";
}
function verifyPart(part) {
if (part == null) {
throw new Error("postparsed contains nullish value");
}
if (!part) {
throw new Error("postparsed contains falsy value");
}
if (typeof part.type !== "string") {
throw new Error("postparsed contains part without type");
}
if (["content", "tag", "placeholder"].indexOf(part.type) === -1) {
throw new Error("postparsed contains part with invalid type : '".concat(part.type, "'"));
}
}
function verifyOptions(options) {
if (!isString(options.contentType)) {
throw new Error("contentType should be a string");
}
if (!isString(options.filePath)) {
throw new Error("filePath should be a string");
}
if (!isString(options.fileType)) {
throw new Error("fileType should be a string");
}
if (!isObject(options.fileTypeConfig)) {
throw new Error("fileTypeConfig should be an object");
}
if (!isObject(options.cachedParsers)) {
throw new Error("cachedParsers should be an object");
}
}
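// AssertionModule is a test-only module: at each lifecycle hook (optionsTransformer, preparse, parse, postparse, resolve, render, postrender) it asserts that the data exchanged between the docxtemplater core and its modules has the expected shape.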
var AssertionModule = /*#__PURE__*/function () {
function AssertionModule() {
_classCallCheck(this, AssertionModule);
this.name = "AssertionModule";
}
return _createClass(AssertionModule, [{
key: "optionsTransformer",
value: function optionsTransformer(options, docxtemplater) {
for (var _i2 = 0, _docxtemplater$module2 = docxtemplater.modules; _i2 < _docxtemplater$module2.length; _i2++) {
var _module = _docxtemplater$module2[_i2];
if (!_module.name) {
throw new Error("Unnamed module");
}
}
return options;
}
}, {
key: "clone",
value: function clone() {
return new AssertionModule();
}
}, {
key: "preparse",
value: function preparse(parsed) {
if (!isArray(parsed)) {
throw new Error("Parsed should be an array");
}
}
}, {
key: "matchers",
value: function matchers(options) {
if (!isArray(options.modules)) {
throw new Error("Options.modules should be an array");
}
return [];
}
}, {
key: "parse",
value: function parse(placeholderContent, options) {
if (!isString(placeholderContent)) {
throw new Error("placeholderContent should be a string");
}
var type = options.type,
position = options.position,
filePath = options.filePath,
contentType = options.contentType,
lIndex = options.lIndex;
if (typeof type !== "string") {
throw new Error("parsed contains part without type");
}
if (type !== "delimiter") {
throw new Error("parsed contains part with invalid type : '".concat(type, "'"));
}
if (position !== "end") {
throw new Error("parsed contains part with invalid position : '".concat(position, "'"));
}
if (typeof filePath !== "string" || filePath.length === 0) {
throw new Error("parsed contains part without filePath");
}
if (typeof contentType !== "string" || contentType.length === 0) {
throw new Error("parsed contains part without contentType");
}
if (!lIndex) {
throw new Error("parsed contains part without lIndex");
}
}
}, {
key: "postparse",
value: function postparse(parsed, options) {
verifyOptions(options);
if (!isArray(parsed)) {
throw new Error("Parsed should be an array");
}
for (var i = 0, len = parsed.length; i < len; i++) {
var part = parsed[i];
try {
verifyPart(part);
} catch (e) {
logContext(parsed, i);
throw e;
}
}
}
}, {
key: "resolve",
value: function resolve(part, options) {
verifyOptions(options);
}
}, {
key: "render",
value: function render(part, options) {
verifyPart(part);
verifyOptions(options);
if (!isObject(part)) {
throw new Error("part should be an object");
}
}
}, {
key: "postrender",
value: function postrender(parts) {
if (!isArray(parts)) {
throw new Error("Parts should be an array");
}
return parts;
}
}]);
}();
module.exports = AssertionModule;
3
tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/js/tests/data/raw-xml-pptx.js
generated
vendored
Normal file
@@ -0,0 +1,3 @@
"use strict";

module.exports = "<p:sp>\n <p:nvSpPr>\n <p:cNvPr id=\"37\" name=\"CustomShape 2\"/>\n <p:cNvSpPr/>\n <p:nvPr/>\n </p:nvSpPr>\n <p:spPr>\n <a:xfrm>\n <a:off x=\"504000\" y=\"1769040\"/>\n <a:ext cx=\"9071280\" cy=\"4384080\"/>\n </a:xfrm>\n <a:prstGeom prst=\"rect\">\n <a:avLst/>\n </a:prstGeom>\n <a:noFill/>\n <a:ln>\n <a:noFill/>\n </a:ln>\n </p:spPr>\n <p:style>\n <a:lnRef idx=\"0\"/>\n <a:fillRef idx=\"0\"/>\n <a:effectRef idx=\"0\"/>\n <a:fontRef idx=\"minor\"/>\n </p:style>\n <p:txBody>\n <a:bodyPr lIns=\"0\" rIns=\"0\" tIns=\"0\" bIns=\"0\" anchor=\"ctr\"/>\n <a:p>\n <a:pPr algn=\"ctr\">\n <a:lnSpc>\n <a:spcPct val=\"100000\"/>\n </a:lnSpc>\n </a:pPr>\n <a:r>\n <a:rPr b=\"0\" lang=\"fr-FR\" sz=\"3200\" spc=\"-1\" strike=\"noStrike\">\n <a:solidFill>\n <a:srgbClr val=\"000000\"/>\n </a:solidFill>\n <a:uFill>\n <a:solidFill>\n <a:srgbClr val=\"ffffff\"/>\n </a:solidFill>\n </a:uFill>\n <a:latin typeface=\"Arial\"/>\n </a:rPr>\n <a:t>Hello World</a:t>\n </a:r>\n <a:endParaRPr b=\"0\" lang=\"fr-FR\" sz=\"1800\" spc=\"-1\" strike=\"noStrike\">\n <a:solidFill>\n <a:srgbClr val=\"000000\"/>\n </a:solidFill>\n <a:uFill>\n <a:solidFill>\n <a:srgbClr val=\"ffffff\"/>\n </a:solidFill>\n </a:uFill>\n <a:latin typeface=\"Arial\"/>\n </a:endParaRPr>\n </a:p>\n </p:txBody>\n</p:sp>";
167
tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/js/tests/e2e/async.js
generated
vendored
Normal file
@@ -0,0 +1,167 @@
"use strict";

var _require = require("../utils.js"),
resolveSoon = _require.resolveSoon;
var fixDocPrCorruption = require("../../modules/fix-doc-pr-corruption.js");
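// resolveSoon comes from the shared test utils and wraps a value in a Promise, so these fixtures exercise the async resolution path rather than plain synchronous data.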
describe("Resolver", function () {
it("should render the document correctly in async mode", function () {
return this.render({
async: true,
name: "office365.docx",
data: {
test: resolveSoon("Value"),
test2: "Value2"
},
options: {
paragraphLoop: true
},
expectedName: "expected-office365.docx",
expectedText: "Value Value2"
});
});
it("should work at parent level", function () {
return this.render({
name: "office365.docx",
data: resolveSoon({
test: resolveSoon("Value"),
test2: "Value2"
}),
options: {
paragraphLoop: true
},
expectedName: "expected-office365.docx",
async: true,
expectedText: "Value Value2"
});
});
it("should resolve loops", function () {
return this.render({
name: "multi-loop.docx",
data: {
companies: resolveSoon([{
name: "Acme",
users: resolveSoon([{
name: resolveSoon("John", 25)
}, resolveSoon({
name: "James"
})], 5)
}, resolveSoon({
name: resolveSoon("Emca"),
users: resolveSoon([{
name: "Mary"
}, {
name: "Liz"
}])
}, 20)]),
test2: "Value2"
},
options: {
paragraphLoop: true
},
expectedName: "expected-multi-loop.docx",
async: true
});
});
var dataNestedLoops = {
a: [{
d: "Hello world"
}]
};
it("should not regress with nested loops sync", function () {
return this.render({
name: "regression-complex-loops.docx",
data: dataNestedLoops,
expectedName: "expected-regression-complex-loops.docx"
});
});
it("should not regress when having [Content_Types.xml] contain Default instead of Override", function () {
return this.render({
name: "with-default-contenttype.docx",
expectedName: "expected-with-default-contenttype.docx"
});
});
it("should not regress with nested loops async", function () {
return this.renderV4({
name: "regression-complex-loops.docx",
data: dataNestedLoops,
expectedName: "expected-regression-complex-loops.docx",
async: true
});
});
var regressData = {
amount_wheels_car_1: "4",
amount_wheels_motorcycle_1: "2",
amount_wheels_car_2: "6",
amount_wheels_motorcycle_2: "3",
id: [{
car: "1",
motorcycle: ""
}]
};
it("should not regress with multiple loops sync", function () {
return this.renderV4({
name: "regression-loops-resolve.docx",
data: regressData,
expectedName: "expected-regression-loops-resolve.docx"
});
});
it("should not regress with multiple loops async", function () {
return this.renderV4({
name: "regression-loops-resolve.docx",
data: regressData,
expectedName: "expected-regression-loops-resolve.docx"
});
});
it("should not regress with long file (hit maxCompact value of 65536)", function () {
return this.renderV4({
name: "regression-loops-resolve.docx",
data: {
amount_wheels_car_1: "4",
amount_wheels_motorcycle_1: "2",
amount_wheels_car_2: "6",
amount_wheels_motorcycle_2: "3",
id: [{
car: "1",
motorcycle: "2"
}, {
car: "2",
motorcycle: "3"
}, {
car: "4",
motorcycle: "5"
}, {
car: "4",
motorcycle: "5"
}]
},
options: {
paragraphLoop: true
},
expectedName: "expected-regression-loops-resolve-long.docx",
async: true
});
});
it("should deduplicate a16:rowId tag", function () {
return this.renderV4({
name: "a16-row-id.pptx",
data: {
loop: [1, 2, 3, 4]
},
expectedName: "expected-a16-row-id.pptx",
async: true
});
});
it("should work with fix doc pr corruption", function () {
return this.renderV4({
name: "loop-image.docx",
options: {
modules: [fixDocPrCorruption]
},
data: {
loop: [1, 2, 3, 4]
},
expectedName: "expected-loop-images.docx",
async: true
});
});
});
1210
tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/js/tests/e2e/base.js
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
68
tradeCattle/aiotagro-cattle-trade/node_modules/docxtemplater/js/tests/e2e/doc-props.js
generated
vendored
Normal file
@@ -0,0 +1,68 @@
"use strict";

var _require = require("../utils.js"),
createDocV4 = _require.createDocV4,
shouldBeSame = _require.shouldBeSame,
expect = _require.expect;
describe("Docx document properties", function () {
it("should change values in doc-props", function () {
var doc = createDocV4("tag-docprops.docx", {
paragraphLoop: true
});
expect(doc.getFullText("docProps/app.xml")).to.be.equal("TitleName: {first_name}");
doc.render({
first_name: "Hipp",
last_name: "Edgar",
phone: "0652455478",
description: "New Website"
});
expect(doc.getFullText()).to.be.equal("Edgar Hipp");
expect(doc.getFullText("word/header1.xml")).to.be.equal("Edgar Hipp0652455478New Website");
expect(doc.getFullText("word/footer1.xml")).to.be.equal("EdgarHipp0652455478");
expect(doc.getFullText("docProps/app.xml")).to.be.equal("TitleName: Hipp");
shouldBeSame({
doc: doc,
expectedName: "expected-tag-docprops.docx"
});
});
it("should change custom values inside '<vt:lpwstr>' in file docProps/custom.xml", function () {
return this.render({
name: "tag-docprops-in-doc.docx",
data: {
first_name: "Hipp",
email: "john@acme.com",
last_name: "Edgar",
phone: "0652455478",
description: "New Website"
},
expectedName: "expected-tag-docprops-in-doc.docx"
});
});
it("should be possible to ignore files in docProps/core.xml", function () {
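// This inline module removes docProps/core.xml and docProps/app.xml from doc.targets in getFileType, so those parts are skipped during rendering.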
var avoidRenderingCoreXMLModule = {
name: "avoidRenderingCoreXMLModule",
getFileType: function getFileType(_ref) {
var doc = _ref.doc;
doc.targets = doc.targets.filter(function (file) {
if (file === "docProps/core.xml" || file === "docProps/app.xml") {
return false;
}
return true;
});
}
};
var doc = createDocV4("core-xml-missing-close-tag.docx", {
modules: [avoidRenderingCoreXMLModule]
});
doc.render({
first_name: "Hipp",
last_name: "Edgar",
phone: "0652455478",
description: "New Website"
});
shouldBeSame({
doc: doc,
expectedName: "expected-core-xml.docx"
});
});
});
Some files were not shown because too many files have changed in this diff.