Refactor assets bundler and split Javascript files

Frédéric Guillot 2018-07-05 22:18:51 -07:00
parent e1c56b2e53
commit 53deb0b8cd
49 changed files with 2837 additions and 2000 deletions

Gopkg.lock (generated)

@@ -54,8 +54,8 @@
     "css",
     "js"
   ]
-  revision = "222672169d634c440a73abc47685074e1a9daa60"
-  version = "v2.3.4"
+  revision = "8d72a4127ae33b755e95bffede9b92e396267ce2"
+  version = "v2.3.5"

 [[projects]]
   name = "github.com/tdewolff/parse"
@@ -66,8 +66,8 @@
     "js",
     "strconv"
   ]
-  revision = "639f6272aec6b52094db77b9ec488214b0b4b1a1"
-  version = "v2.3.2"
+  revision = "d739d6fccb0971177e06352fea02d3552625efb1"
+  version = "v2.3.3"

 [[projects]]
   branch = "master"


@@ -35,82 +35,160 @@ var {{ .Map }}Checksums = map[string]string{
 {{ end }}}
 `
-var generatedTpl = template.Must(template.New("").Parse(tpl))
+var bundleTpl = template.Must(template.New("").Parse(tpl))
-type GeneratedFile struct {
+type Bundle struct {
     Package, Map string
     Files map[string]string
     Checksums map[string]string
 }
-func normalizeBasename(filename string) string {
-    filename = strings.TrimSuffix(filename, filepath.Ext(filename))
-    return strings.Replace(filename, " ", "_", -1)
-}
-func generateFile(serializer, pkg, mapName, pattern, output string) {
-    generatedFile := &GeneratedFile{
-        Package: pkg,
-        Map: mapName,
-        Files: make(map[string]string),
-        Checksums: make(map[string]string),
-    }
-    files, _ := filepath.Glob(pattern)
-    for _, file := range files {
-        basename := path.Base(file)
-        content, err := ioutil.ReadFile(file)
-        if err != nil {
-            panic(err)
-        }
-        switch serializer {
-        case "css":
-            m := minify.New()
-            m.AddFunc("text/css", css.Minify)
-            content, err = m.Bytes("text/css", content)
-            if err != nil {
-                panic(err)
-            }
-            basename = normalizeBasename(basename)
-            generatedFile.Files[basename] = string(content)
-        case "js":
-            m := minify.New()
-            m.AddFunc("text/javascript", js.Minify)
-            content, err = m.Bytes("text/javascript", content)
-            if err != nil {
-                panic(err)
-            }
-            basename = normalizeBasename(basename)
-            generatedFile.Files[basename] = string(content)
-        case "base64":
-            encodedContent := base64.StdEncoding.EncodeToString(content)
-            generatedFile.Files[basename] = encodedContent
-        default:
-            basename = normalizeBasename(basename)
-            generatedFile.Files[basename] = string(content)
-        }
-        generatedFile.Checksums[basename] = fmt.Sprintf("%x", sha256.Sum256(content))
-    }
-    f, err := os.Create(output)
+func (b *Bundle) Write(filename string) {
+    f, err := os.Create(filename)
     if err != nil {
         panic(err)
     }
     defer f.Close()
-    generatedTpl.Execute(f, generatedFile)
+    bundleTpl.Execute(f, b)
+}
+func NewBundle(pkg, mapName string) *Bundle {
+    return &Bundle{
+        Package: pkg,
+        Map: mapName,
+        Files: make(map[string]string),
+        Checksums: make(map[string]string),
+    }
+}
+func readFile(filename string) []byte {
+    data, err := ioutil.ReadFile(filename)
+    if err != nil {
+        panic(err)
+    }
+    return data
+}
+func checksum(data []byte) string {
+    return fmt.Sprintf("%x", sha256.Sum256(data))
+}
+func basename(filename string) string {
+    return path.Base(filename)
+}
+func stripExtension(filename string) string {
+    filename = strings.TrimSuffix(filename, filepath.Ext(filename))
+    return strings.Replace(filename, " ", "_", -1)
+}
+func glob(pattern string) []string {
+    files, _ := filepath.Glob(pattern)
+    return files
+}
+func concat(files []string) string {
+    var b strings.Builder
+    for _, file := range files {
+        b.Write(readFile(file))
+    }
+    return b.String()
+}
+func generateJSBundle(bundleFile string, srcFiles []string) {
+    var b strings.Builder
+    b.WriteString("(function() {'use strict';")
+    b.WriteString(concat(srcFiles))
+    b.WriteString("})();")
+    m := minify.New()
+    m.AddFunc("text/javascript", js.Minify)
+    output, err := m.String("text/javascript", b.String())
+    if err != nil {
+        panic(err)
+    }
+    bundle := NewBundle("static", "Javascript")
+    bundle.Files["app"] = output
+    bundle.Checksums["app"] = checksum([]byte(output))
+    bundle.Write(bundleFile)
+}
+func generateCSSBundle(bundleFile string, srcFiles []string) {
+    bundle := NewBundle("static", "Stylesheets")
+    for _, srcFile := range srcFiles {
+        data := readFile(srcFile)
+        filename := stripExtension(basename(srcFile))
+        m := minify.New()
+        m.AddFunc("text/css", css.Minify)
+        minifiedData, err := m.Bytes("text/css", data)
+        if err != nil {
+            panic(err)
+        }
+        bundle.Files[filename] = string(minifiedData)
+        bundle.Checksums[filename] = checksum(minifiedData)
+    }
+    bundle.Write(bundleFile)
+}
+func generateBinaryBundle(bundleFile string, srcFiles []string) {
+    bundle := NewBundle("static", "Binaries")
+    for _, srcFile := range srcFiles {
+        data := readFile(srcFile)
+        filename := basename(srcFile)
+        encodedData := base64.StdEncoding.EncodeToString(data)
+        bundle.Files[filename] = string(encodedData)
+        bundle.Checksums[filename] = checksum(data)
+    }
+    bundle.Write(bundleFile)
+}
+func generateBundle(bundleFile, pkg, mapName string, srcFiles []string) {
+    bundle := NewBundle(pkg, mapName)
+    for _, srcFile := range srcFiles {
+        data := readFile(srcFile)
+        filename := stripExtension(basename(srcFile))
+        bundle.Files[filename] = string(data)
+        bundle.Checksums[filename] = checksum(data)
+    }
+    bundle.Write(bundleFile)
 }
 func main() {
-    generateFile("none", "sql", "SqlMap", "sql/*.sql", "sql/sql.go")
-    generateFile("base64", "static", "Binaries", "ui/static/bin/*", "ui/static/bin.go")
-    generateFile("css", "static", "Stylesheets", "ui/static/css/*.css", "ui/static/css.go")
-    generateFile("js", "static", "Javascript", "ui/static/js/*.js", "ui/static/js.go")
-    generateFile("none", "template", "templateViewsMap", "template/html/*.html", "template/views.go")
-    generateFile("none", "template", "templateCommonMap", "template/html/common/*.html", "template/common.go")
-    generateFile("none", "locale", "translations", "locale/translations/*.json", "locale/translations.go")
+    generateJSBundle("ui/static/js.go", []string{
+        "ui/static/js/dom_helper.js",
+        "ui/static/js/touch_handler.js",
+        "ui/static/js/keyboard_handler.js",
+        "ui/static/js/mouse_handler.js",
+        "ui/static/js/form_handler.js",
+        "ui/static/js/request_builder.js",
+        "ui/static/js/unread_counter_handler.js",
+        "ui/static/js/entry_handler.js",
+        "ui/static/js/confirm_handler.js",
+        "ui/static/js/menu_handler.js",
+        "ui/static/js/modal_handler.js",
+        "ui/static/js/nav_handler.js",
+        "ui/static/js/bootstrap.js",
+    })
+    generateCSSBundle("ui/static/css.go", glob("ui/static/css/*.css"))
+    generateBinaryBundle("ui/static/bin.go", glob("ui/static/bin/*"))
+    generateBundle("sql/sql.go", "sql", "SqlMap", glob("sql/*.sql"))
+    generateBundle("template/views.go", "template", "templateViewsMap", glob("template/html/*.html"))
+    generateBundle("template/common.go", "template", "templateCommonMap", glob("template/html/common/*.html"))
+    generateBundle("locale/translations.go", "locale", "translations", glob("locale/translations/*.json"))
 }
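Not part of this commit, but as an illustration of the refactored helpers: adding another bundle type only needs NewBundle plus the small file helpers defined above. The function name generateFontBundle, the "Fonts" map name, and the glob pattern below are hypothetical; the sketch assumes it sits in the same generator file, where encoding/base64 is already imported.

// Hypothetical sketch built only from the helpers shown above.
func generateFontBundle(bundleFile string, srcFiles []string) {
    bundle := NewBundle("static", "Fonts") // "Fonts" is an invented map name
    for _, srcFile := range srcFiles {
        data := readFile(srcFile)
        // Store the font base64-encoded and checksum the raw bytes, mirroring generateBinaryBundle.
        bundle.Files[basename(srcFile)] = base64.StdEncoding.EncodeToString(data)
        bundle.Checksums[basename(srcFile)] = checksum(data)
    }
    bundle.Write(bundleFile)
}

// It would then be registered in main() next to the other calls, for example:
//     generateFontBundle("ui/static/fonts.go", glob("ui/static/fonts/*.woff2"))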

ui/static/js.go (generated)

@@ -34,8 +34,7 @@ if(this.queue.length>=2){this.queue=[];}};}
 isEventIgnored(event){return event.target.tagName==="INPUT"||event.target.tagName==="TEXTAREA";}
 getKey(event){const mapping={'Esc':'Escape','Up':'ArrowUp','Down':'ArrowDown','Left':'ArrowLeft','Right':'ArrowRight'};for(let key in mapping){if(mapping.hasOwnProperty(key)&&key===event.key){return mapping[key];}}
 return event.key;}}
-class FormHandler{static handleSubmitButtons(){let elements=document.querySelectorAll("form");elements.forEach((element)=>{element.onsubmit=()=>{let button=document.querySelector("button");if(button){button.innerHTML=button.dataset.labelLoading;button.disabled=true;}};});}}
-class MouseHandler{onClick(selector,callback){let elements=document.querySelectorAll(selector);elements.forEach((element)=>{element.onclick=(event)=>{event.preventDefault();callback(event);};});}}
+class MouseHandler{onClick(selector,callback){let elements=document.querySelectorAll(selector);elements.forEach((element)=>{element.onclick=(event)=>{event.preventDefault();callback(event);};});}}class FormHandler{static handleSubmitButtons(){let elements=document.querySelectorAll("form");elements.forEach((element)=>{element.onsubmit=()=>{let button=document.querySelector("button");if(button){button.innerHTML=button.dataset.labelLoading;button.disabled=true;}};});}}
 class RequestBuilder{constructor(url){this.callback=null;this.url=url;this.options={method:"POST",cache:"no-cache",credentials:"include",body:null,headers:new Headers({"Content-Type":"application/json","X-Csrf-Token":this.getCsrfToken()})};}
 withBody(body){this.options.body=JSON.stringify(body);return this;}
 withCallback(callback){this.callback=callback;return this;}
@@ -94,5 +93,5 @@ document.addEventListener("DOMContentLoaded",function(){FormHandler.handleSubmit
 }
 var JavascriptChecksums = map[string]string{
-    "app": "717f6c6431128b6263dc1f54edf6fd0c6efc3bcbc8c9baf23768c8f23ce53675",
+    "app": "c090bbc7f503aa032b4cfe68b58bc4754133aaed4f77ff768ac63f41528f55c3",
 }
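The regenerated ui/static/js.go now carries a single "app" entry plus its SHA-256 checksum. Below is a minimal, hypothetical sketch (not from this commit) of how such a generated map could be served with the checksum as a cache validator; the Javascript map name is an assumption based on the {{ .Map }} template and the NewBundle("static", "Javascript") call, since only JavascriptChecksums is visible in this hunk.

package main

import (
    "io"
    "net/http"
)

// Stand-ins for the maps generated into ui/static/js.go; the real values come from the bundler.
var (
    Javascript          = map[string]string{"app": "(function(){'use strict';/* bundled JS */})();"}
    JavascriptChecksums = map[string]string{"app": "c090bbc7f503aa032b4cfe68b58bc4754133aaed4f77ff768ac63f41528f55c3"}
)

func serveAppJS(w http.ResponseWriter, r *http.Request) {
    etag := JavascriptChecksums["app"]
    if r.Header.Get("If-None-Match") == etag { // simplified conditional-request handling
        w.WriteHeader(http.StatusNotModified)
        return
    }
    w.Header().Set("Content-Type", "text/javascript; charset=utf-8")
    w.Header().Set("ETag", etag)
    io.WriteString(w, Javascript["app"])
}

func main() {
    http.HandleFunc("/js/app.js", serveAppJS)
    http.ListenAndServe(":8080", nil)
}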

ui/static/js/.jshintrc (new file)

@@ -0,0 +1,3 @@
{
"esversion": 6
}


@@ -1,823 +0,0 @@
/*jshint esversion: 6 */
(function() {
'use strict';
class DomHelper {
static isVisible(element) {
return element.offsetParent !== null;
}
static openNewTab(url) {
let win = window.open("");
win.opener = null;
win.location = url;
win.focus();
}
static scrollPageTo(element) {
let windowScrollPosition = window.pageYOffset;
let windowHeight = document.documentElement.clientHeight;
let viewportPosition = windowScrollPosition + windowHeight;
let itemBottomPosition = element.offsetTop + element.offsetHeight;
if (viewportPosition - itemBottomPosition < 0 || viewportPosition - element.offsetTop > windowHeight) {
window.scrollTo(0, element.offsetTop - 10);
}
}
static getVisibleElements(selector) {
let elements = document.querySelectorAll(selector);
let result = [];
for (let i = 0; i < elements.length; i++) {
if (this.isVisible(elements[i])) {
result.push(elements[i]);
}
}
return result;
}
static findParent(element, selector) {
for (; element && element !== document; element = element.parentNode) {
if (element.classList.contains(selector)) {
return element;
}
}
return null;
}
}
class TouchHandler {
constructor() {
this.reset();
}
reset() {
this.touch = {
start: {x: -1, y: -1},
move: {x: -1, y: -1},
element: null
};
}
calculateDistance() {
if (this.touch.start.x >= -1 && this.touch.move.x >= -1) {
let horizontalDistance = Math.abs(this.touch.move.x - this.touch.start.x);
let verticalDistance = Math.abs(this.touch.move.y - this.touch.start.y);
if (horizontalDistance > 30 && verticalDistance < 70) {
return this.touch.move.x - this.touch.start.x;
}
}
return 0;
}
findElement(element) {
if (element.classList.contains("touch-item")) {
return element;
}
return DomHelper.findParent(element, "touch-item");
}
onTouchStart(event) {
if (event.touches === undefined || event.touches.length !== 1) {
return;
}
this.reset();
this.touch.start.x = event.touches[0].clientX;
this.touch.start.y = event.touches[0].clientY;
this.touch.element = this.findElement(event.touches[0].target);
}
onTouchMove(event) {
if (event.touches === undefined || event.touches.length !== 1 || this.element === null) {
return;
}
this.touch.move.x = event.touches[0].clientX;
this.touch.move.y = event.touches[0].clientY;
let distance = this.calculateDistance();
let absDistance = Math.abs(distance);
if (absDistance > 0) {
let opacity = 1 - (absDistance > 75 ? 0.9 : absDistance / 75 * 0.9);
let tx = distance > 75 ? 75 : (distance < -75 ? -75 : distance);
this.touch.element.style.opacity = opacity;
this.touch.element.style.transform = "translateX(" + tx + "px)";
}
}
onTouchEnd(event) {
if (event.touches === undefined) {
return;
}
if (this.touch.element !== null) {
let distance = Math.abs(this.calculateDistance());
if (distance > 75) {
EntryHandler.toggleEntryStatus(this.touch.element);
}
this.touch.element.style.opacity = 1;
this.touch.element.style.transform = "none";
}
this.reset();
}
listen() {
let elements = document.querySelectorAll(".touch-item");
elements.forEach((element) => {
element.addEventListener("touchstart", (e) => this.onTouchStart(e), false);
element.addEventListener("touchmove", (e) => this.onTouchMove(e), false);
element.addEventListener("touchend", (e) => this.onTouchEnd(e), false);
element.addEventListener("touchcancel", () => this.reset(), false);
});
}
}
class KeyboardHandler {
constructor() {
this.queue = [];
this.shortcuts = {};
}
on(combination, callback) {
this.shortcuts[combination] = callback;
}
listen() {
document.onkeydown = (event) => {
if (this.isEventIgnored(event)) {
return;
}
let key = this.getKey(event);
this.queue.push(key);
for (let combination in this.shortcuts) {
let keys = combination.split(" ");
if (keys.every((value, index) => value === this.queue[index])) {
this.queue = [];
this.shortcuts[combination](event);
return;
}
if (keys.length === 1 && key === keys[0]) {
this.queue = [];
this.shortcuts[combination](event);
return;
}
}
if (this.queue.length >= 2) {
this.queue = [];
}
};
}
isEventIgnored(event) {
return event.target.tagName === "INPUT" || event.target.tagName === "TEXTAREA";
}
getKey(event) {
const mapping = {
'Esc': 'Escape',
'Up': 'ArrowUp',
'Down': 'ArrowDown',
'Left': 'ArrowLeft',
'Right': 'ArrowRight'
};
for (let key in mapping) {
if (mapping.hasOwnProperty(key) && key === event.key) {
return mapping[key];
}
}
return event.key;
}
}
class FormHandler {
static handleSubmitButtons() {
let elements = document.querySelectorAll("form");
elements.forEach((element) => {
element.onsubmit = () => {
let button = document.querySelector("button");
if (button) {
button.innerHTML = button.dataset.labelLoading;
button.disabled = true;
}
};
});
}
}
class MouseHandler {
onClick(selector, callback) {
let elements = document.querySelectorAll(selector);
elements.forEach((element) => {
element.onclick = (event) => {
event.preventDefault();
callback(event);
};
});
}
}
class RequestBuilder {
constructor(url) {
this.callback = null;
this.url = url;
this.options = {
method: "POST",
cache: "no-cache",
credentials: "include",
body: null,
headers: new Headers({
"Content-Type": "application/json",
"X-Csrf-Token": this.getCsrfToken()
})
};
}
withBody(body) {
this.options.body = JSON.stringify(body);
return this;
}
withCallback(callback) {
this.callback = callback;
return this;
}
getCsrfToken() {
let element = document.querySelector("meta[name=X-CSRF-Token]");
if (element !== null) {
return element.getAttribute("value");
}
return "";
}
execute() {
fetch(new Request(this.url, this.options)).then((response) => {
if (this.callback) {
this.callback(response);
}
});
}
}
class UnreadCounterHandler {
static decrement(n) {
this.updateValue((current) => {
return current - n;
});
}
static increment(n) {
this.updateValue((current) => {
return current + n;
});
}
static updateValue(callback) {
let counterElements = document.querySelectorAll("span.unread-counter");
counterElements.forEach((element) => {
let oldValue = parseInt(element.textContent, 10);
element.innerHTML = callback(oldValue);
});
if (window.location.href.endsWith('/unread')) {
let oldValue = parseInt(document.title.split('(')[1], 10);
let newValue = callback(oldValue);
document.title = document.title.replace(
/(.*?)\(\d+\)(.*?)/,
function (match, prefix, suffix, offset, string) {
return prefix + '(' + newValue + ')' + suffix;
}
);
}
}
}
class EntryHandler {
static updateEntriesStatus(entryIDs, status, callback) {
let url = document.body.dataset.entriesStatusUrl;
let request = new RequestBuilder(url);
request.withBody({entry_ids: entryIDs, status: status});
request.withCallback(callback);
request.execute();
if (status === "read") {
UnreadCounterHandler.decrement(1);
} else {
UnreadCounterHandler.increment(1);
}
}
static toggleEntryStatus(element) {
let entryID = parseInt(element.dataset.id, 10);
let statuses = {read: "unread", unread: "read"};
for (let currentStatus in statuses) {
let newStatus = statuses[currentStatus];
if (element.classList.contains("item-status-" + currentStatus)) {
element.classList.remove("item-status-" + currentStatus);
element.classList.add("item-status-" + newStatus);
this.updateEntriesStatus([entryID], newStatus);
let link = element.querySelector("a[data-toggle-status]");
if (link) {
this.toggleLinkStatus(link);
}
break;
}
}
}
static toggleLinkStatus(link) {
if (link.dataset.value === "read") {
link.innerHTML = link.dataset.labelRead;
link.dataset.value = "unread";
} else {
link.innerHTML = link.dataset.labelUnread;
link.dataset.value = "read";
}
}
static toggleBookmark(element) {
element.innerHTML = element.dataset.labelLoading;
let request = new RequestBuilder(element.dataset.bookmarkUrl);
request.withCallback(() => {
if (element.dataset.value === "star") {
element.innerHTML = element.dataset.labelStar;
element.dataset.value = "unstar";
} else {
element.innerHTML = element.dataset.labelUnstar;
element.dataset.value = "star";
}
});
request.execute();
}
static markEntryAsRead(element) {
if (element.classList.contains("item-status-unread")) {
element.classList.remove("item-status-unread");
element.classList.add("item-status-read");
let entryID = parseInt(element.dataset.id, 10);
this.updateEntriesStatus([entryID], "read");
}
}
static saveEntry(element) {
if (element.dataset.completed) {
return;
}
element.innerHTML = element.dataset.labelLoading;
let request = new RequestBuilder(element.dataset.saveUrl);
request.withCallback(() => {
element.innerHTML = element.dataset.labelDone;
element.dataset.completed = true;
});
request.execute();
}
static fetchOriginalContent(element) {
if (element.dataset.completed) {
return;
}
element.innerHTML = element.dataset.labelLoading;
let request = new RequestBuilder(element.dataset.fetchContentUrl);
request.withCallback((response) => {
element.innerHTML = element.dataset.labelDone;
element.dataset.completed = true;
response.json().then((data) => {
if (data.hasOwnProperty("content")) {
document.querySelector(".entry-content").innerHTML = data.content;
}
});
});
request.execute();
}
}
class ConfirmHandler {
remove(url) {
let request = new RequestBuilder(url);
request.withCallback(() => window.location.reload());
request.execute();
}
handle(event) {
let questionElement = document.createElement("span");
let linkElement = event.target;
let containerElement = linkElement.parentNode;
linkElement.style.display = "none";
let yesElement = document.createElement("a");
yesElement.href = "#";
yesElement.appendChild(document.createTextNode(linkElement.dataset.labelYes));
yesElement.onclick = (event) => {
event.preventDefault();
let loadingElement = document.createElement("span");
loadingElement.className = "loading";
loadingElement.appendChild(document.createTextNode(linkElement.dataset.labelLoading));
questionElement.remove();
containerElement.appendChild(loadingElement);
this.remove(linkElement.dataset.url);
};
let noElement = document.createElement("a");
noElement.href = "#";
noElement.appendChild(document.createTextNode(linkElement.dataset.labelNo));
noElement.onclick = (event) => {
event.preventDefault();
linkElement.style.display = "inline";
questionElement.remove();
};
questionElement.className = "confirm";
questionElement.appendChild(document.createTextNode(linkElement.dataset.labelQuestion + " "));
questionElement.appendChild(yesElement);
questionElement.appendChild(document.createTextNode(", "));
questionElement.appendChild(noElement);
containerElement.appendChild(questionElement);
}
}
class MenuHandler {
clickMenuListItem(event) {
let element = event.target;
if (element.tagName === "A") {
window.location.href = element.getAttribute("href");
} else {
window.location.href = element.querySelector("a").getAttribute("href");
}
}
toggleMainMenu() {
let menu = document.querySelector(".header nav ul");
if (DomHelper.isVisible(menu)) {
menu.style.display = "none";
} else {
menu.style.display = "block";
}
let searchElement = document.querySelector(".header .search");
if (DomHelper.isVisible(searchElement)) {
searchElement.style.display = "none";
} else {
searchElement.style.display = "block";
}
}
}
class ModalHandler {
static exists() {
return document.getElementById("modal-container") !== null;
}
static open(fragment) {
if (ModalHandler.exists()) {
return;
}
let container = document.createElement("div");
container.id = "modal-container";
container.appendChild(document.importNode(fragment, true));
document.body.appendChild(container);
let closeButton = document.querySelector("a.btn-close-modal");
if (closeButton !== null) {
closeButton.onclick = (event) => {
event.preventDefault();
ModalHandler.close();
};
}
}
static close() {
let container = document.getElementById("modal-container");
if (container !== null) {
container.parentNode.removeChild(container);
}
}
}
class NavHandler {
setFocusToSearchInput(event) {
event.preventDefault();
event.stopPropagation();
let toggleSwitchElement = document.querySelector(".search-toggle-switch");
if (toggleSwitchElement) {
toggleSwitchElement.style.display = "none";
}
let searchFormElement = document.querySelector(".search-form");
if (searchFormElement) {
searchFormElement.style.display = "block";
}
let searchInputElement = document.getElementById("search-input");
if (searchInputElement) {
searchInputElement.focus();
searchInputElement.value = "";
}
}
showKeyboardShortcuts() {
let template = document.getElementById("keyboard-shortcuts");
if (template !== null) {
ModalHandler.open(template.content);
}
}
markPageAsRead() {
let items = DomHelper.getVisibleElements(".items .item");
let entryIDs = [];
items.forEach((element) => {
element.classList.add("item-status-read");
entryIDs.push(parseInt(element.dataset.id, 10));
});
if (entryIDs.length > 0) {
EntryHandler.updateEntriesStatus(entryIDs, "read", () => {
// This callback make sure the Ajax request reach the server before we reload the page.
this.goToPage("next", true);
});
}
}
saveEntry() {
if (this.isListView()) {
let currentItem = document.querySelector(".current-item");
if (currentItem !== null) {
let saveLink = currentItem.querySelector("a[data-save-entry]");
if (saveLink) {
EntryHandler.saveEntry(saveLink);
}
}
} else {
let saveLink = document.querySelector("a[data-save-entry]");
if (saveLink) {
EntryHandler.saveEntry(saveLink);
}
}
}
fetchOriginalContent() {
if (! this.isListView()){
let link = document.querySelector("a[data-fetch-content-entry]");
if (link) {
EntryHandler.fetchOriginalContent(link);
}
}
}
toggleEntryStatus() {
let currentItem = document.querySelector(".current-item");
if (currentItem !== null) {
// The order is important here,
// On the unread page, the read item will be hidden.
this.goToNextListItem();
EntryHandler.toggleEntryStatus(currentItem);
}
}
toggleBookmark() {
if (! this.isListView()) {
this.toggleBookmarkLink(document.querySelector(".entry"));
return;
}
let currentItem = document.querySelector(".current-item");
if (currentItem !== null) {
this.toggleBookmarkLink(currentItem);
}
}
toggleBookmarkLink(parent) {
let bookmarkLink = parent.querySelector("a[data-toggle-bookmark]");
if (bookmarkLink) {
EntryHandler.toggleBookmark(bookmarkLink);
}
}
openOriginalLink() {
let entryLink = document.querySelector(".entry h1 a");
if (entryLink !== null) {
DomHelper.openNewTab(entryLink.getAttribute("href"));
return;
}
let currentItemOriginalLink = document.querySelector(".current-item a[data-original-link]");
if (currentItemOriginalLink !== null) {
DomHelper.openNewTab(currentItemOriginalLink.getAttribute("href"));
// Move to the next item and if we are on the unread page mark this item as read.
let currentItem = document.querySelector(".current-item");
this.goToNextListItem();
EntryHandler.markEntryAsRead(currentItem);
}
}
openSelectedItem() {
let currentItemLink = document.querySelector(".current-item .item-title a");
if (currentItemLink !== null) {
window.location.href = currentItemLink.getAttribute("href");
}
}
/**
* @param {string} page Page to redirect to.
* @param {boolean} fallbackSelf Refresh actual page if the page is not found.
*/
goToPage(page, fallbackSelf) {
let element = document.querySelector("a[data-page=" + page + "]");
if (element) {
document.location.href = element.href;
} else if (fallbackSelf) {
window.location.reload();
}
}
goToPrevious() {
if (this.isListView()) {
this.goToPreviousListItem();
} else {
this.goToPage("previous");
}
}
goToNext() {
if (this.isListView()) {
this.goToNextListItem();
} else {
this.goToPage("next");
}
}
goToPreviousListItem() {
let items = DomHelper.getVisibleElements(".items .item");
if (items.length === 0) {
return;
}
if (document.querySelector(".current-item") === null) {
items[0].classList.add("current-item");
return;
}
for (let i = 0; i < items.length; i++) {
if (items[i].classList.contains("current-item")) {
items[i].classList.remove("current-item");
if (i - 1 >= 0) {
items[i - 1].classList.add("current-item");
DomHelper.scrollPageTo(items[i - 1]);
}
break;
}
}
}
goToNextListItem() {
let currentItem = document.querySelector(".current-item");
let items = DomHelper.getVisibleElements(".items .item");
if (items.length === 0) {
return;
}
if (currentItem === null) {
items[0].classList.add("current-item");
return;
}
for (let i = 0; i < items.length; i++) {
if (items[i].classList.contains("current-item")) {
items[i].classList.remove("current-item");
if (i + 1 < items.length) {
items[i + 1].classList.add("current-item");
DomHelper.scrollPageTo(items[i + 1]);
}
break;
}
}
}
isListView() {
return document.querySelector(".items") !== null;
}
}
document.addEventListener("DOMContentLoaded", function() {
FormHandler.handleSubmitButtons();
let touchHandler = new TouchHandler();
touchHandler.listen();
let navHandler = new NavHandler();
let keyboardHandler = new KeyboardHandler();
keyboardHandler.on("g u", () => navHandler.goToPage("unread"));
keyboardHandler.on("g b", () => navHandler.goToPage("starred"));
keyboardHandler.on("g h", () => navHandler.goToPage("history"));
keyboardHandler.on("g f", () => navHandler.goToPage("feeds"));
keyboardHandler.on("g c", () => navHandler.goToPage("categories"));
keyboardHandler.on("g s", () => navHandler.goToPage("settings"));
keyboardHandler.on("ArrowLeft", () => navHandler.goToPrevious());
keyboardHandler.on("ArrowRight", () => navHandler.goToNext());
keyboardHandler.on("j", () => navHandler.goToPrevious());
keyboardHandler.on("p", () => navHandler.goToPrevious());
keyboardHandler.on("k", () => navHandler.goToNext());
keyboardHandler.on("n", () => navHandler.goToNext());
keyboardHandler.on("h", () => navHandler.goToPage("previous"));
keyboardHandler.on("l", () => navHandler.goToPage("next"));
keyboardHandler.on("o", () => navHandler.openSelectedItem());
keyboardHandler.on("v", () => navHandler.openOriginalLink());
keyboardHandler.on("m", () => navHandler.toggleEntryStatus());
keyboardHandler.on("A", () => navHandler.markPageAsRead());
keyboardHandler.on("s", () => navHandler.saveEntry());
keyboardHandler.on("d", () => navHandler.fetchOriginalContent());
keyboardHandler.on("f", () => navHandler.toggleBookmark());
keyboardHandler.on("?", () => navHandler.showKeyboardShortcuts());
keyboardHandler.on("/", (e) => navHandler.setFocusToSearchInput(e));
keyboardHandler.on("Escape", () => ModalHandler.close());
keyboardHandler.listen();
let mouseHandler = new MouseHandler();
mouseHandler.onClick("a[data-save-entry]", (event) => {
event.preventDefault();
EntryHandler.saveEntry(event.target);
});
mouseHandler.onClick("a[data-toggle-bookmark]", (event) => {
event.preventDefault();
EntryHandler.toggleBookmark(event.target);
});
mouseHandler.onClick("a[data-toggle-status]", (event) => {
event.preventDefault();
let currentItem = DomHelper.findParent(event.target, "item");
if (currentItem) {
EntryHandler.toggleEntryStatus(currentItem);
}
});
mouseHandler.onClick("a[data-fetch-content-entry]", (event) => {
event.preventDefault();
EntryHandler.fetchOriginalContent(event.target);
});
mouseHandler.onClick("a[data-on-click=markPageAsRead]", () => navHandler.markPageAsRead());
mouseHandler.onClick("a[data-confirm]", (event) => {
(new ConfirmHandler()).handle(event);
});
mouseHandler.onClick("a[data-action=search]", (event) => {
navHandler.setFocusToSearchInput(event);
});
if (document.documentElement.clientWidth < 600) {
let menuHandler = new MenuHandler();
mouseHandler.onClick(".logo", () => menuHandler.toggleMainMenu());
mouseHandler.onClick(".header nav li", (event) => menuHandler.clickMenuListItem(event));
}
});
})();

ui/static/js/bootstrap.js (new file)

@@ -0,0 +1,74 @@
document.addEventListener("DOMContentLoaded", function() {
FormHandler.handleSubmitButtons();
let touchHandler = new TouchHandler();
touchHandler.listen();
let navHandler = new NavHandler();
let keyboardHandler = new KeyboardHandler();
keyboardHandler.on("g u", () => navHandler.goToPage("unread"));
keyboardHandler.on("g b", () => navHandler.goToPage("starred"));
keyboardHandler.on("g h", () => navHandler.goToPage("history"));
keyboardHandler.on("g f", () => navHandler.goToPage("feeds"));
keyboardHandler.on("g c", () => navHandler.goToPage("categories"));
keyboardHandler.on("g s", () => navHandler.goToPage("settings"));
keyboardHandler.on("ArrowLeft", () => navHandler.goToPrevious());
keyboardHandler.on("ArrowRight", () => navHandler.goToNext());
keyboardHandler.on("j", () => navHandler.goToPrevious());
keyboardHandler.on("p", () => navHandler.goToPrevious());
keyboardHandler.on("k", () => navHandler.goToNext());
keyboardHandler.on("n", () => navHandler.goToNext());
keyboardHandler.on("h", () => navHandler.goToPage("previous"));
keyboardHandler.on("l", () => navHandler.goToPage("next"));
keyboardHandler.on("o", () => navHandler.openSelectedItem());
keyboardHandler.on("v", () => navHandler.openOriginalLink());
keyboardHandler.on("m", () => navHandler.toggleEntryStatus());
keyboardHandler.on("A", () => navHandler.markPageAsRead());
keyboardHandler.on("s", () => navHandler.saveEntry());
keyboardHandler.on("d", () => navHandler.fetchOriginalContent());
keyboardHandler.on("f", () => navHandler.toggleBookmark());
keyboardHandler.on("?", () => navHandler.showKeyboardShortcuts());
keyboardHandler.on("/", (e) => navHandler.setFocusToSearchInput(e));
keyboardHandler.on("Escape", () => ModalHandler.close());
keyboardHandler.listen();
let mouseHandler = new MouseHandler();
mouseHandler.onClick("a[data-save-entry]", (event) => {
event.preventDefault();
EntryHandler.saveEntry(event.target);
});
mouseHandler.onClick("a[data-toggle-bookmark]", (event) => {
event.preventDefault();
EntryHandler.toggleBookmark(event.target);
});
mouseHandler.onClick("a[data-toggle-status]", (event) => {
event.preventDefault();
let currentItem = DomHelper.findParent(event.target, "item");
if (currentItem) {
EntryHandler.toggleEntryStatus(currentItem);
}
});
mouseHandler.onClick("a[data-fetch-content-entry]", (event) => {
event.preventDefault();
EntryHandler.fetchOriginalContent(event.target);
});
mouseHandler.onClick("a[data-on-click=markPageAsRead]", () => navHandler.markPageAsRead());
mouseHandler.onClick("a[data-confirm]", (event) => {
(new ConfirmHandler()).handle(event);
});
mouseHandler.onClick("a[data-action=search]", (event) => {
navHandler.setFocusToSearchInput(event);
});
if (document.documentElement.clientWidth < 600) {
let menuHandler = new MenuHandler();
mouseHandler.onClick(".logo", () => menuHandler.toggleMainMenu());
mouseHandler.onClick(".header nav li", (event) => menuHandler.clickMenuListItem(event));
}
});

ui/static/js/confirm_handler.js (new file)

@@ -0,0 +1,47 @@
class ConfirmHandler {
remove(url) {
let request = new RequestBuilder(url);
request.withCallback(() => window.location.reload());
request.execute();
}
handle(event) {
let questionElement = document.createElement("span");
let linkElement = event.target;
let containerElement = linkElement.parentNode;
linkElement.style.display = "none";
let yesElement = document.createElement("a");
yesElement.href = "#";
yesElement.appendChild(document.createTextNode(linkElement.dataset.labelYes));
yesElement.onclick = (event) => {
event.preventDefault();
let loadingElement = document.createElement("span");
loadingElement.className = "loading";
loadingElement.appendChild(document.createTextNode(linkElement.dataset.labelLoading));
questionElement.remove();
containerElement.appendChild(loadingElement);
this.remove(linkElement.dataset.url);
};
let noElement = document.createElement("a");
noElement.href = "#";
noElement.appendChild(document.createTextNode(linkElement.dataset.labelNo));
noElement.onclick = (event) => {
event.preventDefault();
linkElement.style.display = "inline";
questionElement.remove();
};
questionElement.className = "confirm";
questionElement.appendChild(document.createTextNode(linkElement.dataset.labelQuestion + " "));
questionElement.appendChild(yesElement);
questionElement.appendChild(document.createTextNode(", "));
questionElement.appendChild(noElement);
containerElement.appendChild(questionElement);
}
}

ui/static/js/dom_helper.js (new file)

@@ -0,0 +1,46 @@
class DomHelper {
static isVisible(element) {
return element.offsetParent !== null;
}
static openNewTab(url) {
let win = window.open("");
win.opener = null;
win.location = url;
win.focus();
}
static scrollPageTo(element) {
let windowScrollPosition = window.pageYOffset;
let windowHeight = document.documentElement.clientHeight;
let viewportPosition = windowScrollPosition + windowHeight;
let itemBottomPosition = element.offsetTop + element.offsetHeight;
if (viewportPosition - itemBottomPosition < 0 || viewportPosition - element.offsetTop > windowHeight) {
window.scrollTo(0, element.offsetTop - 10);
}
}
static getVisibleElements(selector) {
let elements = document.querySelectorAll(selector);
let result = [];
for (let i = 0; i < elements.length; i++) {
if (this.isVisible(elements[i])) {
result.push(elements[i]);
}
}
return result;
}
static findParent(element, selector) {
for (; element && element !== document; element = element.parentNode) {
if (element.classList.contains(selector)) {
return element;
}
}
return null;
}
}

ui/static/js/entry_handler.js (new file)

@@ -0,0 +1,110 @@
class EntryHandler {
static updateEntriesStatus(entryIDs, status, callback) {
let url = document.body.dataset.entriesStatusUrl;
let request = new RequestBuilder(url);
request.withBody({entry_ids: entryIDs, status: status});
request.withCallback(callback);
request.execute();
if (status === "read") {
UnreadCounterHandler.decrement(1);
} else {
UnreadCounterHandler.increment(1);
}
}
static toggleEntryStatus(element) {
let entryID = parseInt(element.dataset.id, 10);
let statuses = {read: "unread", unread: "read"};
for (let currentStatus in statuses) {
let newStatus = statuses[currentStatus];
if (element.classList.contains("item-status-" + currentStatus)) {
element.classList.remove("item-status-" + currentStatus);
element.classList.add("item-status-" + newStatus);
this.updateEntriesStatus([entryID], newStatus);
let link = element.querySelector("a[data-toggle-status]");
if (link) {
this.toggleLinkStatus(link);
}
break;
}
}
}
static toggleLinkStatus(link) {
if (link.dataset.value === "read") {
link.innerHTML = link.dataset.labelRead;
link.dataset.value = "unread";
} else {
link.innerHTML = link.dataset.labelUnread;
link.dataset.value = "read";
}
}
static toggleBookmark(element) {
element.innerHTML = element.dataset.labelLoading;
let request = new RequestBuilder(element.dataset.bookmarkUrl);
request.withCallback(() => {
if (element.dataset.value === "star") {
element.innerHTML = element.dataset.labelStar;
element.dataset.value = "unstar";
} else {
element.innerHTML = element.dataset.labelUnstar;
element.dataset.value = "star";
}
});
request.execute();
}
static markEntryAsRead(element) {
if (element.classList.contains("item-status-unread")) {
element.classList.remove("item-status-unread");
element.classList.add("item-status-read");
let entryID = parseInt(element.dataset.id, 10);
this.updateEntriesStatus([entryID], "read");
}
}
static saveEntry(element) {
if (element.dataset.completed) {
return;
}
element.innerHTML = element.dataset.labelLoading;
let request = new RequestBuilder(element.dataset.saveUrl);
request.withCallback(() => {
element.innerHTML = element.dataset.labelDone;
element.dataset.completed = true;
});
request.execute();
}
static fetchOriginalContent(element) {
if (element.dataset.completed) {
return;
}
element.innerHTML = element.dataset.labelLoading;
let request = new RequestBuilder(element.dataset.fetchContentUrl);
request.withCallback((response) => {
element.innerHTML = element.dataset.labelDone;
element.dataset.completed = true;
response.json().then((data) => {
if (data.hasOwnProperty("content")) {
document.querySelector(".entry-content").innerHTML = data.content;
}
});
});
request.execute();
}
}

ui/static/js/form_handler.js (new file)

@@ -0,0 +1,15 @@
class FormHandler {
static handleSubmitButtons() {
let elements = document.querySelectorAll("form");
elements.forEach((element) => {
element.onsubmit = () => {
let button = document.querySelector("button");
if (button) {
button.innerHTML = button.dataset.labelLoading;
button.disabled = true;
}
};
});
}
}

ui/static/js/keyboard_handler.js (new file)

@@ -0,0 +1,63 @@
class KeyboardHandler {
constructor() {
this.queue = [];
this.shortcuts = {};
}
on(combination, callback) {
this.shortcuts[combination] = callback;
}
listen() {
document.onkeydown = (event) => {
if (this.isEventIgnored(event)) {
return;
}
let key = this.getKey(event);
this.queue.push(key);
for (let combination in this.shortcuts) {
let keys = combination.split(" ");
if (keys.every((value, index) => value === this.queue[index])) {
this.queue = [];
this.shortcuts[combination](event);
return;
}
if (keys.length === 1 && key === keys[0]) {
this.queue = [];
this.shortcuts[combination](event);
return;
}
}
if (this.queue.length >= 2) {
this.queue = [];
}
};
}
isEventIgnored(event) {
return event.target.tagName === "INPUT" || event.target.tagName === "TEXTAREA";
}
getKey(event) {
const mapping = {
'Esc': 'Escape',
'Up': 'ArrowUp',
'Down': 'ArrowDown',
'Left': 'ArrowLeft',
'Right': 'ArrowRight'
};
for (let key in mapping) {
if (mapping.hasOwnProperty(key) && key === event.key) {
return mapping[key];
}
}
return event.key;
}
}

ui/static/js/menu_handler.js (new file)

@@ -0,0 +1,27 @@
class MenuHandler {
clickMenuListItem(event) {
let element = event.target;
if (element.tagName === "A") {
window.location.href = element.getAttribute("href");
} else {
window.location.href = element.querySelector("a").getAttribute("href");
}
}
toggleMainMenu() {
let menu = document.querySelector(".header nav ul");
if (DomHelper.isVisible(menu)) {
menu.style.display = "none";
} else {
menu.style.display = "block";
}
let searchElement = document.querySelector(".header .search");
if (DomHelper.isVisible(searchElement)) {
searchElement.style.display = "none";
} else {
searchElement.style.display = "block";
}
}
}

ui/static/js/modal_handler.js (new file)

@@ -0,0 +1,31 @@
class ModalHandler {
static exists() {
return document.getElementById("modal-container") !== null;
}
static open(fragment) {
if (ModalHandler.exists()) {
return;
}
let container = document.createElement("div");
container.id = "modal-container";
container.appendChild(document.importNode(fragment, true));
document.body.appendChild(container);
let closeButton = document.querySelector("a.btn-close-modal");
if (closeButton !== null) {
closeButton.onclick = (event) => {
event.preventDefault();
ModalHandler.close();
};
}
}
static close() {
let container = document.getElementById("modal-container");
if (container !== null) {
container.parentNode.removeChild(container);
}
}
}

ui/static/js/mouse_handler.js (new file)

@@ -0,0 +1,11 @@
class MouseHandler {
onClick(selector, callback) {
let elements = document.querySelectorAll(selector);
elements.forEach((element) => {
element.onclick = (event) => {
event.preventDefault();
callback(event);
};
});
}
}

ui/static/js/nav_handler.js (new file)

@@ -0,0 +1,211 @@
class NavHandler {
setFocusToSearchInput(event) {
event.preventDefault();
event.stopPropagation();
let toggleSwitchElement = document.querySelector(".search-toggle-switch");
if (toggleSwitchElement) {
toggleSwitchElement.style.display = "none";
}
let searchFormElement = document.querySelector(".search-form");
if (searchFormElement) {
searchFormElement.style.display = "block";
}
let searchInputElement = document.getElementById("search-input");
if (searchInputElement) {
searchInputElement.focus();
searchInputElement.value = "";
}
}
showKeyboardShortcuts() {
let template = document.getElementById("keyboard-shortcuts");
if (template !== null) {
ModalHandler.open(template.content);
}
}
markPageAsRead() {
let items = DomHelper.getVisibleElements(".items .item");
let entryIDs = [];
items.forEach((element) => {
element.classList.add("item-status-read");
entryIDs.push(parseInt(element.dataset.id, 10));
});
if (entryIDs.length > 0) {
EntryHandler.updateEntriesStatus(entryIDs, "read", () => {
// This callback make sure the Ajax request reach the server before we reload the page.
this.goToPage("next", true);
});
}
}
saveEntry() {
if (this.isListView()) {
let currentItem = document.querySelector(".current-item");
if (currentItem !== null) {
let saveLink = currentItem.querySelector("a[data-save-entry]");
if (saveLink) {
EntryHandler.saveEntry(saveLink);
}
}
} else {
let saveLink = document.querySelector("a[data-save-entry]");
if (saveLink) {
EntryHandler.saveEntry(saveLink);
}
}
}
fetchOriginalContent() {
if (! this.isListView()){
let link = document.querySelector("a[data-fetch-content-entry]");
if (link) {
EntryHandler.fetchOriginalContent(link);
}
}
}
toggleEntryStatus() {
let currentItem = document.querySelector(".current-item");
if (currentItem !== null) {
// The order is important here,
// On the unread page, the read item will be hidden.
this.goToNextListItem();
EntryHandler.toggleEntryStatus(currentItem);
}
}
toggleBookmark() {
if (! this.isListView()) {
this.toggleBookmarkLink(document.querySelector(".entry"));
return;
}
let currentItem = document.querySelector(".current-item");
if (currentItem !== null) {
this.toggleBookmarkLink(currentItem);
}
}
toggleBookmarkLink(parent) {
let bookmarkLink = parent.querySelector("a[data-toggle-bookmark]");
if (bookmarkLink) {
EntryHandler.toggleBookmark(bookmarkLink);
}
}
openOriginalLink() {
let entryLink = document.querySelector(".entry h1 a");
if (entryLink !== null) {
DomHelper.openNewTab(entryLink.getAttribute("href"));
return;
}
let currentItemOriginalLink = document.querySelector(".current-item a[data-original-link]");
if (currentItemOriginalLink !== null) {
DomHelper.openNewTab(currentItemOriginalLink.getAttribute("href"));
// Move to the next item and if we are on the unread page mark this item as read.
let currentItem = document.querySelector(".current-item");
this.goToNextListItem();
EntryHandler.markEntryAsRead(currentItem);
}
}
openSelectedItem() {
let currentItemLink = document.querySelector(".current-item .item-title a");
if (currentItemLink !== null) {
window.location.href = currentItemLink.getAttribute("href");
}
}
/**
* @param {string} page Page to redirect to.
* @param {boolean} fallbackSelf Refresh actual page if the page is not found.
*/
goToPage(page, fallbackSelf) {
let element = document.querySelector("a[data-page=" + page + "]");
if (element) {
document.location.href = element.href;
} else if (fallbackSelf) {
window.location.reload();
}
}
goToPrevious() {
if (this.isListView()) {
this.goToPreviousListItem();
} else {
this.goToPage("previous");
}
}
goToNext() {
if (this.isListView()) {
this.goToNextListItem();
} else {
this.goToPage("next");
}
}
goToPreviousListItem() {
let items = DomHelper.getVisibleElements(".items .item");
if (items.length === 0) {
return;
}
if (document.querySelector(".current-item") === null) {
items[0].classList.add("current-item");
return;
}
for (let i = 0; i < items.length; i++) {
if (items[i].classList.contains("current-item")) {
items[i].classList.remove("current-item");
if (i - 1 >= 0) {
items[i - 1].classList.add("current-item");
DomHelper.scrollPageTo(items[i - 1]);
}
break;
}
}
}
goToNextListItem() {
let currentItem = document.querySelector(".current-item");
let items = DomHelper.getVisibleElements(".items .item");
if (items.length === 0) {
return;
}
if (currentItem === null) {
items[0].classList.add("current-item");
return;
}
for (let i = 0; i < items.length; i++) {
if (items[i].classList.contains("current-item")) {
items[i].classList.remove("current-item");
if (i + 1 < items.length) {
items[i + 1].classList.add("current-item");
DomHelper.scrollPageTo(items[i + 1]);
}
break;
}
}
}
isListView() {
return document.querySelector(".items") !== null;
}
}

ui/static/js/request_builder.js (new file)

@@ -0,0 +1,43 @@
class RequestBuilder {
constructor(url) {
this.callback = null;
this.url = url;
this.options = {
method: "POST",
cache: "no-cache",
credentials: "include",
body: null,
headers: new Headers({
"Content-Type": "application/json",
"X-Csrf-Token": this.getCsrfToken()
})
};
}
withBody(body) {
this.options.body = JSON.stringify(body);
return this;
}
withCallback(callback) {
this.callback = callback;
return this;
}
getCsrfToken() {
let element = document.querySelector("meta[name=X-CSRF-Token]");
if (element !== null) {
return element.getAttribute("value");
}
return "";
}
execute() {
fetch(new Request(this.url, this.options)).then((response) => {
if (this.callback) {
this.callback(response);
}
});
}
}

ui/static/js/touch_handler.js (new file)

@@ -0,0 +1,94 @@
class TouchHandler {
constructor() {
this.reset();
}
reset() {
this.touch = {
start: {x: -1, y: -1},
move: {x: -1, y: -1},
element: null
};
}
calculateDistance() {
if (this.touch.start.x >= -1 && this.touch.move.x >= -1) {
let horizontalDistance = Math.abs(this.touch.move.x - this.touch.start.x);
let verticalDistance = Math.abs(this.touch.move.y - this.touch.start.y);
if (horizontalDistance > 30 && verticalDistance < 70) {
return this.touch.move.x - this.touch.start.x;
}
}
return 0;
}
findElement(element) {
if (element.classList.contains("touch-item")) {
return element;
}
return DomHelper.findParent(element, "touch-item");
}
onTouchStart(event) {
if (event.touches === undefined || event.touches.length !== 1) {
return;
}
this.reset();
this.touch.start.x = event.touches[0].clientX;
this.touch.start.y = event.touches[0].clientY;
this.touch.element = this.findElement(event.touches[0].target);
}
onTouchMove(event) {
if (event.touches === undefined || event.touches.length !== 1 || this.element === null) {
return;
}
this.touch.move.x = event.touches[0].clientX;
this.touch.move.y = event.touches[0].clientY;
let distance = this.calculateDistance();
let absDistance = Math.abs(distance);
if (absDistance > 0) {
let opacity = 1 - (absDistance > 75 ? 0.9 : absDistance / 75 * 0.9);
let tx = distance > 75 ? 75 : (distance < -75 ? -75 : distance);
this.touch.element.style.opacity = opacity;
this.touch.element.style.transform = "translateX(" + tx + "px)";
}
}
onTouchEnd(event) {
if (event.touches === undefined) {
return;
}
if (this.touch.element !== null) {
let distance = Math.abs(this.calculateDistance());
if (distance > 75) {
EntryHandler.toggleEntryStatus(this.touch.element);
}
this.touch.element.style.opacity = 1;
this.touch.element.style.transform = "none";
}
this.reset();
}
listen() {
let elements = document.querySelectorAll(".touch-item");
elements.forEach((element) => {
element.addEventListener("touchstart", (e) => this.onTouchStart(e), false);
element.addEventListener("touchmove", (e) => this.onTouchMove(e), false);
element.addEventListener("touchend", (e) => this.onTouchEnd(e), false);
element.addEventListener("touchcancel", () => this.reset(), false);
});
}
}

ui/static/js/unread_counter_handler.js (new file)

@@ -0,0 +1,33 @@
class UnreadCounterHandler {
static decrement(n) {
this.updateValue((current) => {
return current - n;
});
}
static increment(n) {
this.updateValue((current) => {
return current + n;
});
}
static updateValue(callback) {
let counterElements = document.querySelectorAll("span.unread-counter");
counterElements.forEach((element) => {
let oldValue = parseInt(element.textContent, 10);
element.innerHTML = callback(oldValue);
});
if (window.location.href.endsWith('/unread')) {
let oldValue = parseInt(document.title.split('(')[1], 10);
let newValue = callback(oldValue);
document.title = document.title.replace(
/(.*?)\(\d+\)(.*?)/,
function (match, prefix, suffix, offset, string) {
return prefix + '(' + newValue + ')' + suffix;
}
);
}
}
}


@@ -2,15 +2,17 @@ builds:
   - binary: minify
     main: ./cmd/minify/
     ldflags: -s -w -X main.Version={{.Version}} -X main.Commit={{.Commit}} -X main.Date={{.Date}}
+    env:
+      - CGO_ENABLED=0
     goos:
-      - windows
       - linux
+      - windows
       - darwin
+      - freebsd
+      - netbsd
+      - openbsd
     goarch:
       - amd64
+      - 386
+      - arm
+      - arm64
 archive:
   format: tar.gz
   format_overrides:


@@ -58,16 +58,16 @@ The core functionality associates mimetypes with minification functions, allowin
 - [ ] General speed-up of all minifiers (use ASM for whitespace funcs)
 - [ ] Improve JS minifiers by shortening variables and proper semicolon omission
 - [ ] Speed-up SVG minifier, it is very slow
-- [ ] Proper parser error reporting and line number + column information
+- [x] Proper parser error reporting and line number + column information
 - [ ] Generation of source maps (uncertain, might slow down parsers too much if it cannot run separately nicely)
-- [ ] Look into compression of images, fonts and other web resources (into package `compress`?)
+- [ ] Look into compression of images, fonts and other web resources (into package `compress`)?
 - [ ] Create a cmd to pack webfiles (much like webpack), ie. merging CSS and JS files, inlining small external files, minification and gzipping. This would work on HTML files.
-- [ ] Create a package to format files, much like `gofmt` for Go files
+- [ ] Create a package to format files, much like `gofmt` for Go files?
 ## Prologue
-Minifiers or bindings to minifiers exist in almost all programming languages. Some implementations are merely using several regular-expressions to trim whitespace and comments (even though regex for parsing HTML/XML is ill-advised, for a good read see [Regular Expressions: Now You Have Two Problems](http://blog.codinghorror.com/regular-expressions-now-you-have-two-problems/)). Some implementations are much more profound, such as the [YUI Compressor](http://yui.github.io/yuicompressor/) and [Google Closure Compiler](https://github.com/google/closure-compiler) for JS. As most existing implementations either use Java or JavaScript and don't focus on performance, they are pretty slow. Additionally, loading the whole file into memory at once is bad for really large files (or impossible for streams).
+Minifiers or bindings to minifiers exist in almost all programming languages. Some implementations are merely using several regular-expressions to trim whitespace and comments (even though regex for parsing HTML/XML is ill-advised, for a good read see [Regular Expressions: Now You Have Two Problems](http://blog.codinghorror.com/regular-expressions-now-you-have-two-problems/)). Some implementations are much more profound, such as the [YUI Compressor](http://yui.github.io/yuicompressor/) and [Google Closure Compiler](https://github.com/google/closure-compiler) for JS. As most existing implementations either use JavaScript, use regexes, and don't focus on performance, they are pretty slow.
-This minifier proves to be that fast and extensive minifier that can handle HTML and any other filetype it may contain (CSS, JS, ...). It streams the input and output and can minify files concurrently.
+This minifier proves to be that fast and extensive minifier that can handle HTML and any other filetype it may contain (CSS, JS, ...). It is usually orders of magnitude faster than existing minifiers.
 ## Installation
 Run the following command
@@ -225,7 +225,7 @@ Options:
 The JS minifier is pretty basic. It removes comments, whitespace and line breaks whenever it can. It employs all the rules that [JSMin](http://www.crockford.com/javascript/jsmin.html) does too, but has additional improvements. For example the prefix-postfix bug is fixed.
-Common speeds of PHP and JS implementations are about 100-300kB/s (see [Uglify2](http://lisperator.net/uglifyjs/), [Adventures in PHP web asset minimization](https://www.happyassassin.net/2014/12/29/adventures-in-php-web-asset-minimization/)). This implementation or orders of magnitude faster, around ~50MB/s.
+Common speeds of PHP and JS implementations are about 100-300kB/s (see [Uglify2](http://lisperator.net/uglifyjs/), [Adventures in PHP web asset minimization](https://www.happyassassin.net/2014/12/29/adventures-in-php-web-asset-minimization/)). This implementation or orders of magnitude faster, around ~80MB/s.
 TODO:
 - shorten local variables / function parameters names
@@ -246,7 +246,6 @@ The SVG minifier uses these minifications:
 - strip SVG version
 - strip CDATA sections wherever possible
 - collapse tags with no content to a void tag
-- collapse empty container tags (`g`, `svg`, ...)
 - minify style tag and attributes with the CSS minifier
 - minify colors
 - shorten lengths and numbers and remove default `px` unit
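For context, the tdewolff/minify API that this vendored README documents and that the new bundler calls is small. A minimal, self-contained usage sketch in Go, using the import paths pinned in Gopkg.lock above (the sample CSS string and the expected output comment are illustrative only):

package main

import (
    "fmt"

    "github.com/tdewolff/minify"
    "github.com/tdewolff/minify/css"
)

func main() {
    // Register a minifier per mimetype, then minify a string or byte slice,
    // the same pattern generateCSSBundle and generateJSBundle use above.
    m := minify.New()
    m.AddFunc("text/css", css.Minify)

    out, err := m.String("text/css", "a { color: #ff0000; }")
    if err != nil {
        panic(err)
    }
    fmt.Println(out) // e.g. a{color:red}
}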


@ -13,50 +13,34 @@ Run the following command
and the `minify` command will be in your `$GOPATH/bin`. and the `minify` command will be in your `$GOPATH/bin`.
## Usage You can enable bash tab completion by using
source minify_bash_tab_completion
## Usage
Usage: minify [options] [input]

Options:
  -a, --all                               Minify all files, including hidden files and files in hidden directories
      --css-decimals int                  Number of decimals to preserve in numbers, -1 is all (default -1)
  -h, --help                              Show usage
      --html-keep-conditional-comments    Preserve all IE conditional comments
      --html-keep-default-attrvals        Preserve default attribute values
      --html-keep-document-tags           Preserve html, head and body tags
      --html-keep-end-tags                Preserve all end tags
      --html-keep-whitespace              Preserve whitespace characters but still collapse multiple into one
  -l, --list                              List all accepted filetypes
      --match string                      Filename pattern matching using regular expressions
      --mime string                       Mimetype (eg. text/css), optional for input filenames, has precedence over -type
  -o, --output string                     Output file or directory (must have trailing slash), leave blank to use stdout
  -r, --recursive                         Recursively minify directories
      --svg-decimals int                  Number of decimals to preserve in numbers, -1 is all (default -1)
      --type string                       Filetype (eg. css), optional for input filenames
      --url string                        URL of file to enable URL minification
  -v, --verbose                           Verbose
      --version                           Version
  -w, --watch                             Watch files and minify upon changes
      --xml-keep-whitespace               Preserve whitespace characters but still collapse multiple into one

Input:
  Files or directories, leave blank to use stdin
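For example, minifying a single file, a directory of one filetype, and an explicitly typed file could look like this (paths are illustrative; only flags documented above are used):

    minify -o build/index.html index.html
    minify -r -o build/ --type css assets/
    minify --mime text/javascript -o app.min.js app.js

Note that a directory output must keep its trailing slash, as stated in the -o description.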
View file
@ -15,7 +15,6 @@ import (
"runtime" "runtime"
"sort" "sort"
"strings" "strings"
"sync/atomic"
"time" "time"
humanize "github.com/dustin/go-humanize" humanize "github.com/dustin/go-humanize"
@ -45,6 +44,7 @@ var filetypeMime = map[string]string{
} }
var ( var (
help bool
hidden bool hidden bool
list bool list bool
m *min.M m *min.M
@ -55,7 +55,7 @@ var (
watch bool watch bool
) )
type task struct { type Task struct {
srcs []string srcs []string
srcDir string srcDir string
dst string dst string
@ -80,15 +80,18 @@ func main() {
svgMinifier := &svg.Minifier{} svgMinifier := &svg.Minifier{}
xmlMinifier := &xml.Minifier{} xmlMinifier := &xml.Minifier{}
flag := flag.NewFlagSet("minify", flag.ContinueOnError)
flag.Usage = func() { flag.Usage = func() {
fmt.Fprintf(os.Stderr, "Usage: %s [options] [input]\n\nOptions:\n", os.Args[0]) fmt.Fprintf(os.Stderr, "Usage: %s [options] [input]\n\nOptions:\n", os.Args[0])
flag.PrintDefaults() flag.PrintDefaults()
fmt.Fprintf(os.Stderr, "\nInput:\n Files or directories, leave blank to use stdin\n") fmt.Fprintf(os.Stderr, "\nInput:\n Files or directories, leave blank to use stdin\n")
} }
flag.BoolVarP(&help, "help", "h", false, "Show usage")
flag.StringVarP(&output, "output", "o", "", "Output file or directory (must have trailing slash), leave blank to use stdout") flag.StringVarP(&output, "output", "o", "", "Output file or directory (must have trailing slash), leave blank to use stdout")
flag.StringVar(&mimetype, "mime", "", "Mimetype (text/css, application/javascript, ...), optional for input filenames, has precedence over -type") flag.StringVar(&mimetype, "mime", "", "Mimetype (eg. text/css), optional for input filenames, has precedence over -type")
flag.StringVar(&filetype, "type", "", "Filetype (css, html, js, ...), optional for input filenames") flag.StringVar(&filetype, "type", "", "Filetype (eg. css), optional for input filenames")
flag.StringVar(&match, "match", "", "Filename pattern matching using regular expressions, see https://github.com/google/re2/wiki/Syntax") flag.StringVar(&match, "match", "", "Filename pattern matching using regular expressions")
flag.BoolVarP(&recursive, "recursive", "r", false, "Recursively minify directories") flag.BoolVarP(&recursive, "recursive", "r", false, "Recursively minify directories")
flag.BoolVarP(&hidden, "all", "a", false, "Minify all files, including hidden files and files in hidden directories") flag.BoolVarP(&hidden, "all", "a", false, "Minify all files, including hidden files and files in hidden directories")
flag.BoolVarP(&list, "list", "l", false, "List all accepted filetypes") flag.BoolVarP(&list, "list", "l", false, "List all accepted filetypes")
@ -105,7 +108,11 @@ func main() {
flag.BoolVar(&htmlMinifier.KeepWhitespace, "html-keep-whitespace", false, "Preserve whitespace characters but still collapse multiple into one") flag.BoolVar(&htmlMinifier.KeepWhitespace, "html-keep-whitespace", false, "Preserve whitespace characters but still collapse multiple into one")
flag.IntVar(&svgMinifier.Decimals, "svg-decimals", -1, "Number of decimals to preserve in numbers, -1 is all") flag.IntVar(&svgMinifier.Decimals, "svg-decimals", -1, "Number of decimals to preserve in numbers, -1 is all")
flag.BoolVar(&xmlMinifier.KeepWhitespace, "xml-keep-whitespace", false, "Preserve whitespace characters but still collapse multiple into one") flag.BoolVar(&xmlMinifier.KeepWhitespace, "xml-keep-whitespace", false, "Preserve whitespace characters but still collapse multiple into one")
flag.Parse() if err := flag.Parse(os.Args[1:]); err != nil {
fmt.Printf("Error: %v\n\n", err)
flag.Usage()
os.Exit(2)
}
rawInputs := flag.Args() rawInputs := flag.Args()
Error = log.New(os.Stderr, "ERROR: ", 0) Error = log.New(os.Stderr, "ERROR: ", 0)
@ -115,13 +122,18 @@ func main() {
Info = log.New(ioutil.Discard, "INFO: ", 0) Info = log.New(ioutil.Discard, "INFO: ", 0)
} }
if help {
flag.Usage()
os.Exit(0)
}
if version { if version {
if Version == "devel" { if Version == "devel" {
fmt.Printf("minify version devel+%.7s %s\n", Commit, Date) fmt.Printf("minify version devel+%.7s %s\n", Commit, Date)
} else { } else {
fmt.Printf("minify version %s\n", Version) fmt.Printf("minify version %s\n", Version)
} }
return os.Exit(0)
} }
if list { if list {
@ -133,7 +145,7 @@ func main() {
for _, k := range keys { for _, k := range keys {
fmt.Println(k + "\t" + filetypeMime[k]) fmt.Println(k + "\t" + filetypeMime[k])
} }
return os.Exit(0)
} }
useStdin := len(rawInputs) == 0 useStdin := len(rawInputs) == 0
@ -148,7 +160,11 @@ func main() {
} }
if watch && (useStdin || output == "") { if watch && (useStdin || output == "") {
Error.Fatalln("watch doesn't work with stdin or stdout") Error.Fatalln("watch doesn't work on stdin and stdout, specify input and output")
}
if recursive && (useStdin || output == "") {
Error.Fatalln("recursive minification doesn't work on stdin and stdout, specify input and output")
} }
//////////////// ////////////////
@ -174,7 +190,7 @@ func main() {
} }
if len(tasks) == 0 { if len(tasks) == 0 {
tasks = append(tasks, task{[]string{""}, "", output}) // stdin tasks = append(tasks, Task{[]string{""}, "", output}) // stdin
} }
m = min.New() m = min.New()
@ -191,47 +207,33 @@ func main() {
start := time.Now()

chanTasks := make(chan Task, 100)
chanFails := make(chan int, 100)

numWorkers := 1
if !verbose && len(tasks) > 1 {
    numWorkers = 4
    if n := runtime.NumCPU(); n > numWorkers {
        numWorkers = n
    }
}
for n := 0; n < numWorkers; n++ {
    go minifyWorker(mimetype, chanTasks, chanFails)
}
for _, task := range tasks {
    chanTasks <- task
}
if watch { if watch {
var watcher *RecursiveWatcher watcher, err := NewRecursiveWatcher(recursive)
watcher, err = NewRecursiveWatcher(recursive)
if err != nil { if err != nil {
Error.Fatalln(err) Error.Fatalln(err)
} }
defer watcher.Close() defer watcher.Close()
var watcherTasks = make(map[string]task, len(rawInputs)) watcherTasks := make(map[string]Task, len(rawInputs))
for _, task := range tasks { for _, task := range tasks {
for _, src := range task.srcs { for _, src := range task.srcs {
watcherTasks[src] = task watcherTasks[src] = task
@ -248,6 +250,7 @@ func main() {
select { select {
case <-c: case <-c:
watcher.Close() watcher.Close()
fmt.Printf("\n")
case file, ok := <-changes: case file, ok := <-changes:
if !ok { if !ok {
changes = nil changes = nil
@ -260,10 +263,10 @@ func main() {
continue continue
} }
var t task var t Task
if t, ok = watcherTasks[file]; ok { if t, ok = watcherTasks[file]; ok {
if !verbose { if !verbose {
fmt.Fprintln(os.Stderr, file, "changed") Info.Println(file, "changed")
} }
for _, src := range t.srcs { for _, src := range t.srcs {
if src == t.dst { if src == t.dst {
@ -271,21 +274,35 @@ func main() {
break break
} }
} }
if ok := minify(mimetype, t); !ok { chanTasks <- t
fails++
} }
} }
} }
} }
fails := 0
close(chanTasks)
for n := 0; n < numWorkers; n++ {
fails += <-chanFails
} }
if verbose { if verbose {
Info.Println(time.Since(start), "total") Info.Println(time.Since(start), "total")
} }
if fails > 0 { if fails > 0 {
os.Exit(1) os.Exit(1)
} }
os.Exit(0)
}
func minifyWorker(mimetype string, chanTasks <-chan Task, chanFails chan<- int) {
fails := 0
for task := range chanTasks {
if ok := minify(mimetype, task); !ok {
fails++
}
}
chanFails <- fails
} }
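minifyWorker above is one half of a plain fan-out/fan-in pattern: a fixed number of goroutines drain a shared task channel and each reports its own failure count on a results channel. Reduced to a self-contained sketch (names and the placeholder process function are illustrative, not the ones used in this command):

    package main

    import (
        "fmt"
        "runtime"
    )

    func process(name string) error {
        // Placeholder for the real minification work.
        return nil
    }

    func main() {
        jobs := []string{"a.css", "b.js", "c.html"}

        numWorkers := runtime.NumCPU()
        chanJobs := make(chan string, len(jobs))
        chanFails := make(chan int, numWorkers)

        // Start a fixed number of workers; each reports its own failure count.
        for n := 0; n < numWorkers; n++ {
            go func() {
                fails := 0
                for job := range chanJobs {
                    if err := process(job); err != nil {
                        fails++
                    }
                }
                chanFails <- fails
            }()
        }

        for _, job := range jobs {
            chanJobs <- job
        }
        close(chanJobs)

        fails := 0
        for n := 0; n < numWorkers; n++ {
            fails += <-chanFails
        }
        fmt.Println("failed jobs:", fails)
    }

Closing the task channel is what ends each worker's range loop, so the final fan-in loop receives exactly one count per worker.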
func getMimetype(mimetype, filetype string, useStdin bool) string { func getMimetype(mimetype, filetype string, useStdin bool) string {
@ -344,9 +361,9 @@ func validDir(info os.FileInfo) bool {
return info.Mode().IsDir() && len(info.Name()) > 0 && (hidden || info.Name()[0] != '.') return info.Mode().IsDir() && len(info.Name()) > 0 && (hidden || info.Name()[0] != '.')
} }
func expandInputs(inputs []string, dirDst bool) ([]task, bool) { func expandInputs(inputs []string, dirDst bool) ([]Task, bool) {
ok := true ok := true
tasks := []task{} tasks := []Task{}
for _, input := range inputs { for _, input := range inputs {
input = sanitizePath(input) input = sanitizePath(input)
info, err := os.Stat(input) info, err := os.Stat(input)
@ -357,7 +374,7 @@ func expandInputs(inputs []string, dirDst bool) ([]task, bool) {
} }
if info.Mode().IsRegular() { if info.Mode().IsRegular() {
tasks = append(tasks, task{[]string{filepath.ToSlash(input)}, "", ""}) tasks = append(tasks, Task{[]string{filepath.ToSlash(input)}, "", ""})
} else if info.Mode().IsDir() { } else if info.Mode().IsDir() {
expandDir(input, &tasks, &ok) expandDir(input, &tasks, &ok)
} else { } else {
@ -391,7 +408,7 @@ func expandInputs(inputs []string, dirDst bool) ([]task, bool) {
return tasks, ok return tasks, ok
} }
func expandDir(input string, tasks *[]task, ok *bool) { func expandDir(input string, tasks *[]Task, ok *bool) {
if !recursive { if !recursive {
if verbose { if verbose {
Info.Println("expanding directory", input) Info.Println("expanding directory", input)
@ -404,7 +421,7 @@ func expandDir(input string, tasks *[]task, ok *bool) {
} }
for _, info := range infos { for _, info := range infos {
if validFile(info) { if validFile(info) {
*tasks = append(*tasks, task{[]string{path.Join(input, info.Name())}, input, ""}) *tasks = append(*tasks, Task{[]string{path.Join(input, info.Name())}, input, ""})
} }
} }
} else { } else {
@ -417,7 +434,7 @@ func expandDir(input string, tasks *[]task, ok *bool) {
return err return err
} }
if validFile(info) { if validFile(info) {
*tasks = append(*tasks, task{[]string{filepath.ToSlash(path)}, input, ""}) *tasks = append(*tasks, Task{[]string{filepath.ToSlash(path)}, input, ""})
} else if info.Mode().IsDir() && !validDir(info) && info.Name() != "." && info.Name() != ".." { // check for IsDir, so we don't skip the rest of the directory when we have an invalid file } else if info.Mode().IsDir() && !validDir(info) && info.Name() != "." && info.Name() != ".." { // check for IsDir, so we don't skip the rest of the directory when we have an invalid file
return filepath.SkipDir return filepath.SkipDir
} }
@ -430,7 +447,7 @@ func expandDir(input string, tasks *[]task, ok *bool) {
} }
} }
func expandOutputs(output string, tasks *[]task) bool { func expandOutputs(output string, tasks *[]Task) bool {
if verbose { if verbose {
if output == "" { if output == "" {
Info.Println("minify to stdout") Info.Println("minify to stdout")
@ -459,7 +476,7 @@ func expandOutputs(output string, tasks *[]task) bool {
return ok return ok
} }
func getOutputFilename(output string, t task) (string, error) { func getOutputFilename(output string, t Task) (string, error) {
if len(output) > 0 && output[len(output)-1] == '/' { if len(output) > 0 && output[len(output)-1] == '/' {
rel, err := filepath.Rel(t.srcDir, t.srcs[0]) rel, err := filepath.Rel(t.srcDir, t.srcs[0])
if err != nil { if err != nil {
@ -470,47 +487,44 @@ func getOutputFilename(output string, t task) (string, error) {
return output, nil return output, nil
} }
func openInputFile(input string) (*os.File, bool) { func openInputFile(input string) (io.ReadCloser, error) {
var r *os.File var r *os.File
if input == "" { if input == "" {
r = os.Stdin r = os.Stdin
} else { } else {
err := try.Do(func(attempt int) (bool, error) { err := try.Do(func(attempt int) (bool, error) {
var err error var ferr error
r, err = os.Open(input) r, ferr = os.Open(input)
return attempt < 5, err return attempt < 5, ferr
}) })
if err != nil { if err != nil {
Error.Println(err) return nil, err
return nil, false
} }
} }
return r, true return r, nil
} }
func openOutputFile(output string) (*os.File, bool) { func openOutputFile(output string) (*os.File, error) {
var w *os.File var w *os.File
if output == "" { if output == "" {
w = os.Stdout w = os.Stdout
} else { } else {
if err := os.MkdirAll(path.Dir(output), 0777); err != nil { if err := os.MkdirAll(path.Dir(output), 0777); err != nil {
Error.Println(err) return nil, err
return nil, false
} }
err := try.Do(func(attempt int) (bool, error) { err := try.Do(func(attempt int) (bool, error) {
var err error var ferr error
w, err = os.OpenFile(output, os.O_WRONLY|os.O_TRUNC|os.O_CREATE, 0666) w, ferr = os.OpenFile(output, os.O_WRONLY|os.O_TRUNC|os.O_CREATE, 0666)
return attempt < 5, err return attempt < 5, ferr
}) })
if err != nil { if err != nil {
Error.Println(err) return nil, err
return nil, false
} }
} }
return w, true return w, nil
} }
func minify(mimetype string, t task) bool { func minify(mimetype string, t Task) bool {
if mimetype == "" { if mimetype == "" {
for _, src := range t.srcs { for _, src := range t.srcs {
if len(path.Ext(src)) > 0 { if len(path.Ext(src)) > 0 {
@ -545,8 +559,8 @@ func minify(mimetype string, t task) bool {
if t.srcs[i] == t.dst { if t.srcs[i] == t.dst {
t.srcs[i] += ".bak" t.srcs[i] += ".bak"
err := try.Do(func(attempt int) (bool, error) { err := try.Do(func(attempt int) (bool, error) {
err := os.Rename(t.dst, t.srcs[i]) ferr := os.Rename(t.dst, t.srcs[i])
return attempt < 5, err return attempt < 5, ferr
}) })
if err != nil { if err != nil {
Error.Println(err) Error.Println(err)
@ -557,42 +571,32 @@ func minify(mimetype string, t task) bool {
} }
} }
frs := make([]io.Reader, len(t.srcs)) fr, err := NewConcatFileReader(t.srcs, openInputFile)
for i, src := range t.srcs { if err != nil {
fr, ok := openInputFile(src) Error.Println(err)
if !ok {
for _, fr := range frs {
fr.(io.ReadCloser).Close()
}
return false return false
} }
if i > 0 && mimetype == filetypeMime["js"] { if mimetype == filetypeMime["js"] {
// prepend newline when concatenating JS files fr.SetSeparator([]byte("\n"))
frs[i] = NewPrependReader(fr, []byte("\n"))
} else {
frs[i] = fr
} }
} r := NewCountingReader(fr)
r := &countingReader{io.MultiReader(frs...), 0}
fw, ok := openOutputFile(t.dst) fw, err := openOutputFile(t.dst)
if !ok { if err != nil {
for _, fr := range frs { Error.Println(err)
fr.(io.ReadCloser).Close() fr.Close()
}
return false return false
} }
var w *countingWriter var w *countingWriter
if fw == os.Stdout { if fw == os.Stdout {
w = &countingWriter{fw, 0} w = NewCountingWriter(fw)
} else { } else {
w = &countingWriter{bufio.NewWriter(fw), 0} w = NewCountingWriter(bufio.NewWriter(fw))
} }
success := true success := true
startTime := time.Now() startTime := time.Now()
err := m.Minify(mimetype, w, r) if err = m.Minify(mimetype, w, r); err != nil {
if err != nil {
Error.Println("cannot minify "+srcName+":", err) Error.Println("cannot minify "+srcName+":", err)
success = false success = false
} }
@ -615,9 +619,7 @@ func minify(mimetype string, t task) bool {
} }
} }
for _, fr := range frs { fr.Close()
fr.(io.ReadCloser).Close()
}
if bw, ok := w.Writer.(*bufio.Writer); ok { if bw, ok := w.Writer.(*bufio.Writer); ok {
bw.Flush() bw.Flush()
} }
View file
@ -0,0 +1,29 @@
#!/bin/bash
_minify_complete()
{
local cur_word prev_word flags mimes types
cur_word="${COMP_WORDS[COMP_CWORD]}"
prev_word="${COMP_WORDS[COMP_CWORD-1]}"
flags="-a --all -l --list --match --mime -o --output -r --recursive --type --url -v --verbose --version -w --watch --css-decimals --html-keep-conditional-comments --html-keep-default-attrvals --html-keep-document-tags --html-keep-end-tags --html-keep-whitespace --svg-decimals --xml-keep-whitespace"
mimes="text/css text/html text/javascript application/json image/svg+xml text/xml"
types="css html js json svg xml"
if [[ ${cur_word} == -* ]] ; then
COMPREPLY=( $(compgen -W "${flags}" -- ${cur_word}) )
elif [[ ${prev_word} =~ ^--mime$ ]] ; then
COMPREPLY=( $(compgen -W "${mimes}" -- ${cur_word}) )
elif [[ ${prev_word} =~ ^--type$ ]] ; then
COMPREPLY=( $(compgen -W "${types}" -- ${cur_word}) )
elif [[ ${prev_word} =~ ^--(match|url|css-decimals|svg-decimals)$ ]] ; then
compopt +o default
COMPREPLY=()
else
compopt -o default
COMPREPLY=()
fi
return 0
}
complete -F _minify_complete minify
View file
@ -1,12 +1,18 @@
package main package main
import "io" import (
"io"
)
type countingReader struct { type countingReader struct {
io.Reader io.Reader
N int N int
} }
func NewCountingReader(r io.Reader) *countingReader {
return &countingReader{r, 0}
}
func (r *countingReader) Read(p []byte) (int, error) { func (r *countingReader) Read(p []byte) (int, error) {
n, err := r.Reader.Read(p) n, err := r.Reader.Read(p)
r.N += n r.N += n
@ -18,29 +24,95 @@ type countingWriter struct {
N int N int
} }
func NewCountingWriter(w io.Writer) *countingWriter {
return &countingWriter{w, 0}
}
func (w *countingWriter) Write(p []byte) (int, error) { func (w *countingWriter) Write(p []byte) (int, error) {
n, err := w.Writer.Write(p) n, err := w.Writer.Write(p)
w.N += n w.N += n
return n, err return n, err
} }
type eofReader struct{}

func (r eofReader) Read(p []byte) (int, error) {
    return 0, io.EOF
}

func (r eofReader) Close() error {
    return nil
}

type concatFileReader struct {
    filenames []string
    opener    func(string) (io.ReadCloser, error)
    sep       []byte

    cur     io.ReadCloser
    sepLeft int
}

// NewConcatFileReader reads from a list of filenames, and lazily loads files as it needs them.
// It is a reader that reads a concatenation of those files separated by the separator.
// You must call Close to close the last file in the list.
func NewConcatFileReader(filenames []string, opener func(string) (io.ReadCloser, error)) (*concatFileReader, error) {
var cur io.ReadCloser
if len(filenames) > 0 {
var filename string
filename, filenames = filenames[0], filenames[1:]
var err error
if cur, err = opener(filename); err != nil {
return nil, err
}
} else {
cur = eofReader{}
}
return &concatFileReader{filenames, opener, nil, cur, 0}, nil
}
func (r *concatFileReader) SetSeparator(sep []byte) {
r.sep = sep
}
func (r *concatFileReader) Read(p []byte) (int, error) {
m := r.writeSep(p)
n, err := r.cur.Read(p[m:])
n += m
// current reader is finished, load in the new reader
if err == io.EOF && len(r.filenames) > 0 {
if err := r.cur.Close(); err != nil {
return n, err
}
var filename string
filename, r.filenames = r.filenames[0], r.filenames[1:]
if r.cur, err = r.opener(filename); err != nil {
return n, err
}
r.sepLeft = len(r.sep)
// if previous read returned (0, io.EOF), read from the new reader
if n == 0 {
return r.Read(p)
} else {
n += r.writeSep(p[n:])
}
}
return n, err
}
func (r *concatFileReader) writeSep(p []byte) int {
m := 0
if r.sepLeft > 0 {
m = copy(p, r.sep[len(r.sep)-r.sepLeft:])
r.sepLeft -= m
}
return m
}
func (r *concatFileReader) Close() error {
return r.cur.Close()
} }
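A usage sketch for this reader from within the same package (the opener below is a plain os.Open wrapper instead of the retrying openInputFile, the file names are made up, and the snippet assumes io and os are imported):

    func concatExample() error {
        opener := func(name string) (io.ReadCloser, error) {
            return os.Open(name)
        }

        // Concatenate two JS files with a newline in between, as the minifier does.
        r, err := NewConcatFileReader([]string{"a.js", "b.js"}, opener)
        if err != nil {
            return err
        }
        defer r.Close()
        r.SetSeparator([]byte("\n"))

        _, err = io.Copy(os.Stdout, r)
        return err
    }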
View file
@ -0,0 +1,152 @@
package main
import (
"bytes"
"io"
"io/ioutil"
"testing"
"github.com/tdewolff/test"
)
func testOpener(filename string) (io.ReadCloser, error) {
if filename == "err" {
return nil, test.ErrPlain
} else if filename == "empty" {
return ioutil.NopCloser(test.NewEmptyReader()), nil
}
return ioutil.NopCloser(bytes.NewReader([]byte(filename))), nil
}
func TestConcat(t *testing.T) {
r, err := NewConcatFileReader([]string{"test", "test"}, testOpener)
test.T(t, err, nil)
buf, err := ioutil.ReadAll(r)
test.T(t, err, nil)
test.Bytes(t, buf, []byte("testtest"))
n, err := r.Read(buf)
test.T(t, n, 0)
test.T(t, err, io.EOF)
}
func TestConcatErr(t *testing.T) {
r, err := NewConcatFileReader([]string{"err"}, testOpener)
test.T(t, err, test.ErrPlain)
r, err = NewConcatFileReader([]string{"test", "err"}, testOpener)
test.T(t, err, nil)
buf := make([]byte, 10)
n, err := r.Read(buf)
test.T(t, n, 4)
test.T(t, err, nil)
test.Bytes(t, buf[:n], []byte("test"))
n, err = r.Read(buf)
test.T(t, n, 0)
test.T(t, err, test.ErrPlain)
}
func TestConcatSep(t *testing.T) {
r, err := NewConcatFileReader([]string{"test", "test"}, testOpener)
test.T(t, err, nil)
r.SetSeparator([]byte("_"))
buf := make([]byte, 10)
n, err := r.Read(buf)
test.T(t, n, 4)
test.T(t, err, nil)
test.Bytes(t, buf[:n], []byte("test"))
n, err = r.Read(buf[n:])
test.T(t, n, 5)
test.T(t, err, nil)
test.Bytes(t, buf[:4+n], []byte("test_test"))
}
func TestConcatSepShort1(t *testing.T) {
r, err := NewConcatFileReader([]string{"test", "test"}, testOpener)
test.T(t, err, nil)
r.SetSeparator([]byte("_"))
// insufficient room for separator
buf := make([]byte, 4)
n, err := r.Read(buf)
test.T(t, n, 4)
test.T(t, err, nil)
test.Bytes(t, buf, []byte("test"))
n, err = r.Read(buf[4:])
test.T(t, n, 0)
test.T(t, err, nil)
}
func TestConcatSepShort2(t *testing.T) {
r, err := NewConcatFileReader([]string{"test", "test"}, testOpener)
test.T(t, err, nil)
r.SetSeparator([]byte("_"))
// insufficient room after separator
buf := make([]byte, 5)
_, _ = r.Read(buf)
n, err := r.Read(buf[4:])
test.T(t, n, 1)
test.T(t, err, nil)
test.Bytes(t, buf, []byte("test_"))
}
func TestConcatSepShort3(t *testing.T) {
r, err := NewConcatFileReader([]string{"test", "test"}, testOpener)
test.T(t, err, nil)
r.SetSeparator([]byte("_"))
// insufficient room after separator
buf := make([]byte, 6)
_, _ = r.Read(buf)
n, err := r.Read(buf[4:])
test.T(t, n, 2)
test.T(t, err, nil)
test.Bytes(t, buf, []byte("test_t"))
}
func TestConcatSepShort4(t *testing.T) {
r, err := NewConcatFileReader([]string{"test", "test"}, testOpener)
test.T(t, err, nil)
r.SetSeparator([]byte("xx"))
// insufficient room after separator
buf := make([]byte, 5)
_, _ = r.Read(buf)
n, err := r.Read(buf[4:])
test.T(t, n, 1)
test.T(t, err, nil)
test.Bytes(t, buf, []byte("testx"))
n, err = r.Read(buf[5:])
test.T(t, n, 0)
test.T(t, err, nil)
buf2 := make([]byte, 5)
n, err = r.Read(buf2)
test.T(t, n, 5)
test.T(t, err, nil)
test.Bytes(t, buf2, []byte("xtest"))
}
func TestConcatSepEmpty(t *testing.T) {
r, err := NewConcatFileReader([]string{"empty", "empty"}, testOpener)
test.T(t, err, nil)
r.SetSeparator([]byte("_"))
// insufficient room after separator
buf := make([]byte, 1)
n, err := r.Read(buf)
test.T(t, n, 1)
test.T(t, err, io.EOF)
test.Bytes(t, buf, []byte("_"))
}
View file
@ -87,7 +87,7 @@ func (rw *RecursiveWatcher) Run() chan string {
} }
} }
} else if validFile(info) { } else if validFile(info) {
if event.Op&fsnotify.Create == fsnotify.Create || event.Op&fsnotify.Write == fsnotify.Write { if event.Op&fsnotify.Write == fsnotify.Write {
files <- event.Name files <- event.Name
} }
} }
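The hunk above narrows the trigger to plain write events, so a newly created but still empty file no longer kicks off a minification. A minimal, self-contained sketch of that filter, assuming the github.com/fsnotify/fsnotify package this watcher wraps (the watched path is illustrative):

    package main

    import (
        "log"

        "github.com/fsnotify/fsnotify"
    )

    func main() {
        watcher, err := fsnotify.NewWatcher()
        if err != nil {
            log.Fatal(err)
        }
        defer watcher.Close()

        if err := watcher.Add("assets"); err != nil {
            log.Fatal(err)
        }

        for event := range watcher.Events {
            // Only react to writes; creations alone no longer trigger work.
            if event.Op&fsnotify.Write == fsnotify.Write {
                log.Println("changed:", event.Name)
            }
        }
    }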
View file
@ -12,8 +12,8 @@ import (
// Epsilon is the closest number to zero that is not considered to be zero. // Epsilon is the closest number to zero that is not considered to be zero.
var Epsilon = 0.00001 var Epsilon = 0.00001
// Mediatype minifies a given mediatype by removing all whitespace.
func Mediatype(b []byte) []byte {
j := 0 j := 0
start := 0 start := 0
inString := false inString := false
@ -79,6 +79,107 @@ func DataURI(m *M, dataURI []byte) []byte {
const MaxInt = int(^uint(0) >> 1) const MaxInt = int(^uint(0) >> 1)
const MinInt = -MaxInt - 1 const MinInt = -MaxInt - 1
// Decimal minifies a given byte slice containing a number (see parse.Number) and removes superfluous characters.
// It does not parse or output exponents.
func Decimal(num []byte, prec int) []byte {
// omit first + and register mantissa start and end, whether it's negative and the exponent
neg := false
start := 0
dot := -1
end := len(num)
if 0 < end && (num[0] == '+' || num[0] == '-') {
if num[0] == '-' {
neg = true
}
start++
}
for i, c := range num[start:] {
if c == '.' {
dot = start + i
break
}
}
if dot == -1 {
dot = end
}
// trim leading zeros but leave at least one digit
for start < end-1 && num[start] == '0' {
start++
}
// trim trailing zeros
i := end - 1
for ; i > dot; i-- {
if num[i] != '0' {
end = i + 1
break
}
}
if i == dot {
end = dot
if start == end {
num[start] = '0'
return num[start : start+1]
}
} else if start == end-1 && num[start] == '0' {
return num[start:end]
}
// apply precision
if prec > -1 && dot+1+prec < end {
end = dot + 1 + prec
inc := num[end] >= '5'
if inc || num[end-1] == '0' {
for i := end - 1; i > start; i-- {
if i == dot {
end--
} else if inc {
if num[i] == '9' {
if i > dot {
end--
} else {
num[i] = '0'
}
} else {
num[i]++
inc = false
break
}
} else if i > dot && num[i] == '0' {
end--
}
}
}
if dot == start && end == start+1 {
if inc {
num[start] = '1'
} else {
num[start] = '0'
}
} else {
if dot+1 == end {
end--
}
if inc {
if num[start] == '9' {
num[start] = '0'
copy(num[start+1:], num[start:end])
end++
num[start] = '1'
} else {
num[start]++
}
}
}
}
if neg {
start--
num[start] = '-'
}
return num[start:end]
}
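A quick sketch of how Decimal differs from Number below it; the expected outputs follow the test tables later in this commit, and the import path is the package's published one:

    package main

    import (
        "fmt"

        "github.com/tdewolff/minify"
    )

    func main() {
        // Decimal trims and rounds but never switches to exponent notation.
        fmt.Println(string(minify.Decimal([]byte("0.075"), -1))) // .075
        fmt.Println(string(minify.Decimal([]byte("8.88"), 1)))   // 8.9

        // Number may rewrite to an exponent when that representation is shorter.
        fmt.Println(string(minify.Number([]byte("100e1"), -1))) // 1e3
    }

This difference is why the CSS minifier's new KeepCSS2 option switches from Number to Decimal: CSS 2 parsers do not accept exponent notation.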
// Number minifies a given byte slice containing a number (see parse.Number) and removes superfluous characters. // Number minifies a given byte slice containing a number (see parse.Number) and removes superfluous characters.
func Number(num []byte, prec int) []byte { func Number(num []byte, prec int) []byte {
// omit first + and register mantissa start and end, whether it's negative and the exponent // omit first + and register mantissa start and end, whether it's negative and the exponent
@ -311,6 +412,20 @@ func Number(num []byte, prec int) []byte {
} }
} else { } else {
// case 3 // case 3
// find new end, considering moving numbers to the front, removing the dot and increasing the length of the exponent
newEnd := end
if dot == start {
newEnd = start + n
} else {
newEnd--
}
newEnd += 2 + lenIntExp
exp := intExp
lenExp := lenIntExp
if newEnd < len(num) {
// it saves space to convert the decimal to an integer and decrease the exponent
if dot < end { if dot < end {
if dot == start { if dot == start {
copy(num[start:], num[end-n:end]) copy(num[start:], num[end-n:end])
@ -320,15 +435,23 @@ func Number(num []byte, prec int) []byte {
end-- end--
} }
} }
} else {
// it does not save space and will panic, so we revert to the original representation
exp = origExp
lenExp = 1
if origExp <= -10 || origExp >= 10 {
lenExp = strconv.LenInt(int64(origExp))
}
}
num[end] = 'e' num[end] = 'e'
num[end+1] = '-' num[end+1] = '-'
end += 2 end += 2
intExp = -intExp exp = -exp
for i := end + lenIntExp - 1; i >= end; i-- { for i := end + lenExp - 1; i >= end; i-- {
num[i] = byte(intExp%10) + '0' num[i] = byte(exp%10) + '0'
intExp /= 10 exp /= 10
} }
end += lenIntExp end += lenExp
} }
if neg { if neg {
View file
@ -12,9 +12,9 @@ import (
"github.com/tdewolff/test" "github.com/tdewolff/test"
) )
func TestContentType(t *testing.T) { func TestMediatype(t *testing.T) {
contentTypeTests := []struct { mediatypeTests := []struct {
contentType string mediatype string
expected string expected string
}{ }{
{"text/html", "text/html"}, {"text/html", "text/html"},
@ -22,10 +22,10 @@ func TestContentType(t *testing.T) {
{"text/html; charset=UTF-8 ; param = \" ; \"", "text/html;charset=utf-8;param=\" ; \""}, {"text/html; charset=UTF-8 ; param = \" ; \"", "text/html;charset=utf-8;param=\" ; \""},
{"text/html, text/css", "text/html,text/css"}, {"text/html, text/css", "text/html,text/css"},
} }
for _, tt := range contentTypeTests { for _, tt := range mediatypeTests {
t.Run(tt.contentType, func(t *testing.T) { t.Run(tt.mediatype, func(t *testing.T) {
contentType := ContentType([]byte(tt.contentType)) mediatype := Mediatype([]byte(tt.mediatype))
test.Minify(t, tt.contentType, nil, string(contentType), tt.expected) test.Minify(t, tt.mediatype, nil, string(mediatype), tt.expected)
}) })
} }
} }
@ -62,6 +62,72 @@ func TestDataURI(t *testing.T) {
} }
} }
func TestDecimal(t *testing.T) {
numberTests := []struct {
number string
expected string
}{
{"0", "0"},
{".0", "0"},
{"1.0", "1"},
{"0.1", ".1"},
{"+1", "1"},
{"-1", "-1"},
{"-0.1", "-.1"},
{"10", "10"},
{"100", "100"},
{"1000", "1000"},
{"0.001", ".001"},
{"0.0001", ".0001"},
{"0.252", ".252"},
{"1.252", "1.252"},
{"-1.252", "-1.252"},
{"0.075", ".075"},
{"789012345678901234567890123456789e9234567890123456789", "789012345678901234567890123456789e9234567890123456789"},
{".000100009", ".000100009"},
{".0001000009", ".0001000009"},
{".0001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000009", ".0001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000009"},
{"E\x1f", "E\x1f"}, // fuzz
}
for _, tt := range numberTests {
t.Run(tt.number, func(t *testing.T) {
number := Decimal([]byte(tt.number), -1)
test.Minify(t, tt.number, nil, string(number), tt.expected)
})
}
}
func TestDecimalTruncate(t *testing.T) {
numberTests := []struct {
number string
truncate int
expected string
}{
{"0.1", 1, ".1"},
{"0.0001", 1, "0"},
{"0.111", 1, ".1"},
{"0.111", 0, "0"},
{"0.075", 1, ".1"},
{"0.025", 1, "0"},
{"9.99", 1, "10"},
{"8.88", 1, "8.9"},
{"8.88", 0, "9"},
{"8.00", 0, "8"},
{".88", 0, "1"},
{"1.234", 1, "1.2"},
{"33.33", 0, "33"},
{"29.666", 0, "30"},
{"1.51", 1, "1.5"},
{"1.01", 1, "1"},
}
for _, tt := range numberTests {
t.Run(tt.number, func(t *testing.T) {
number := Decimal([]byte(tt.number), tt.truncate)
test.Minify(t, tt.number, nil, string(number), tt.expected, "truncate to", tt.truncate)
})
}
}
func TestNumber(t *testing.T) { func TestNumber(t *testing.T) {
numberTests := []struct { numberTests := []struct {
number string number string
@ -82,6 +148,8 @@ func TestNumber(t *testing.T) {
{"100e1", "1e3"}, {"100e1", "1e3"},
{"1.1e+1", "11"}, {"1.1e+1", "11"},
{"1.1e6", "11e5"}, {"1.1e6", "11e5"},
{"1.1e", "1.1e"}, // broken number, don't parse
{"1.1e+", "1.1e+"}, // broken number, don't parse
{"0.252", ".252"}, {"0.252", ".252"},
{"1.252", "1.252"}, {"1.252", "1.252"},
{"-1.252", "-1.252"}, {"-1.252", "-1.252"},
@ -90,6 +158,7 @@ func TestNumber(t *testing.T) {
{".000100009", "100009e-9"}, {".000100009", "100009e-9"},
{".0001000009", ".0001000009"}, {".0001000009", ".0001000009"},
{".0001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000009", ".0001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000009"}, {".0001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000009", ".0001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000009"},
{".6000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003e-9", ".6000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003e-9"},
{"E\x1f", "E\x1f"}, // fuzz {"E\x1f", "E\x1f"}, // fuzz
{"1e9223372036854775807", "1e9223372036854775807"}, {"1e9223372036854775807", "1e9223372036854775807"},
{"11e9223372036854775807", "11e9223372036854775807"}, {"11e9223372036854775807", "11e9223372036854775807"},
@ -108,11 +177,11 @@ func TestNumber(t *testing.T) {
{".12345e-2", ".0012345"}, {".12345e-2", ".0012345"},
{".12345e-3", "12345e-8"}, {".12345e-3", "12345e-8"},
{".12345e-4", "12345e-9"}, {".12345e-4", "12345e-9"},
{".12345e-5", "12345e-10"}, {".12345e-5", ".12345e-5"},
{".123456e-3", "123456e-9"}, {".123456e-3", "123456e-9"},
{".123456e-2", ".00123456"}, {".123456e-2", ".00123456"},
{".1234567e-4", "1234567e-11"}, {".1234567e-4", ".1234567e-4"},
{".1234567e-3", ".0001234567"}, {".1234567e-3", ".0001234567"},
{"12345678e-1", "1234567.8"}, {"12345678e-1", "1234567.8"},
@ -155,6 +224,7 @@ func TestNumberTruncate(t *testing.T) {
{"33.33", 0, "33"}, {"33.33", 0, "33"},
{"29.666", 0, "30"}, {"29.666", 0, "30"},
{"1.51", 1, "1.5"}, {"1.51", 1, "1.5"},
{"1.01", 1, "1"},
} }
for _, tt := range numberTests { for _, tt := range numberTests {
t.Run(tt.number, func(t *testing.T) { t.Run(tt.number, func(t *testing.T) {
@ -164,13 +234,32 @@ func TestNumberTruncate(t *testing.T) {
} }
} }
func TestDecimalRandom(t *testing.T) {
N := int(1e4)
if testing.Short() {
N = 0
}
for i := 0; i < N; i++ {
b := RandNumBytes(false)
f, _ := strconv.ParseFloat(string(b), 64)
b2 := make([]byte, len(b))
copy(b2, b)
b2 = Decimal(b2, -1)
f2, _ := strconv.ParseFloat(string(b2), 64)
if math.Abs(f-f2) > 1e-6 {
fmt.Println("Bad:", f, "!=", f2, "in", string(b), "to", string(b2))
}
}
}
func TestNumberRandom(t *testing.T) { func TestNumberRandom(t *testing.T) {
N := int(1e4) N := int(1e4)
if testing.Short() { if testing.Short() {
N = 0 N = 0
} }
for i := 0; i < N; i++ { for i := 0; i < N; i++ {
b := RandNumBytes() b := RandNumBytes(true)
f, _ := strconv.ParseFloat(string(b), 64) f, _ := strconv.ParseFloat(string(b), 64)
b2 := make([]byte, len(b)) b2 := make([]byte, len(b))
@ -191,11 +280,11 @@ var numbers [][]byte
func TestMain(t *testing.T) { func TestMain(t *testing.T) {
numbers = make([][]byte, 0, n) numbers = make([][]byte, 0, n)
for j := 0; j < n; j++ { for j := 0; j < n; j++ {
numbers = append(numbers, RandNumBytes()) numbers = append(numbers, RandNumBytes(true))
} }
} }
func RandNumBytes() []byte { func RandNumBytes(withExp bool) []byte {
var b []byte var b []byte
n := rand.Int() % 10 n := rand.Int() % 10
for i := 0; i < n; i++ { for i := 0; i < n; i++ {
@ -208,7 +297,7 @@ func RandNumBytes() []byte {
b = append(b, byte(rand.Int()%10)+'0') b = append(b, byte(rand.Int()%10)+'0')
} }
} }
if rand.Int()%2 == 0 { if withExp && rand.Int()%2 == 0 {
b = append(b, 'e') b = append(b, 'e')
if rand.Int()%2 == 0 { if rand.Int()%2 == 0 {
b = append(b, '-') b = append(b, '-')
View file
@ -4,6 +4,7 @@ package css // import "github.com/tdewolff/minify/css"
import ( import (
"bytes" "bytes"
"encoding/hex" "encoding/hex"
"fmt"
"io" "io"
"strconv" "strconv"
@ -29,16 +30,19 @@ type cssMinifier struct {
w io.Writer w io.Writer
p *css.Parser p *css.Parser
o *Minifier o *Minifier
valuesBuffer []Token
} }
//////////////////////////////////////////////////////////////// ////////////////////////////////////////////////////////////////
// DefaultMinifier is the default minifier. // DefaultMinifier is the default minifier.
var DefaultMinifier = &Minifier{Decimals: -1} var DefaultMinifier = &Minifier{Decimals: -1, KeepCSS2: false}
// Minifier is a CSS minifier. // Minifier is a CSS minifier.
type Minifier struct { type Minifier struct {
Decimals int Decimals int
KeepCSS2 bool
} }
// Minify minifies CSS data, it reads from r and writes to w. // Minify minifies CSS data, it reads from r and writes to w.
@ -108,7 +112,19 @@ func (c *cssMinifier) minifyGrammar() error {
if _, err := c.w.Write(data); err != nil { if _, err := c.w.Write(data); err != nil {
return err return err
} }
for _, val := range c.p.Values() { values := c.p.Values()
if css.ToHash(data[1:]) == css.Import && len(values) == 2 && values[1].TokenType == css.URLToken {
url := values[1].Data
if url[4] != '"' && url[4] != '\'' {
url = url[3:]
url[0] = '"'
url[len(url)-1] = '"'
} else {
url = url[4 : len(url)-1]
}
values[1].Data = url
}
for _, val := range values {
if _, err := c.w.Write(val.Data); err != nil { if _, err := c.w.Write(val.Data); err != nil {
return err return err
} }
@ -216,35 +232,138 @@ func (c *cssMinifier) minifySelectors(property []byte, values []css.Token) error
return nil return nil
} }
func (c *cssMinifier) minifyDeclaration(property []byte, values []css.Token) error { type Token struct {
if len(values) == 0 { css.TokenType
Data []byte
Components []css.Token // only filled for functions
}
func (t Token) String() string {
if len(t.Components) == 0 {
return t.TokenType.String() + "(" + string(t.Data) + ")"
} else {
return fmt.Sprint(t.Components)
}
}
func (c *cssMinifier) minifyDeclaration(property []byte, components []css.Token) error {
if len(components) == 0 {
return nil return nil
} }
prop := css.ToHash(property) prop := css.ToHash(property)
inProgid := false
// Strip !important from the component list, this will be added later separately
important := false
if len(components) > 2 && components[len(components)-2].TokenType == css.DelimToken && components[len(components)-2].Data[0] == '!' && css.ToHash(components[len(components)-1].Data) == css.Important {
components = components[:len(components)-2]
important = true
}
// Check if this is a simple list of values separated by whitespace or commas; otherwise we will not process it
simple := true
prevSep := true
values := c.valuesBuffer[:0]
for i := 0; i < len(components); i++ {
comp := components[i]
if comp.TokenType == css.LeftParenthesisToken || comp.TokenType == css.LeftBraceToken || comp.TokenType == css.LeftBracketToken || comp.TokenType == css.RightParenthesisToken || comp.TokenType == css.RightBraceToken || comp.TokenType == css.RightBracketToken {
simple = false
break
}
if !prevSep && comp.TokenType != css.WhitespaceToken && comp.TokenType != css.CommaToken {
simple = false
break
}
if comp.TokenType == css.WhitespaceToken || comp.TokenType == css.CommaToken {
prevSep = true
if comp.TokenType == css.CommaToken {
values = append(values, Token{components[i].TokenType, components[i].Data, nil})
}
} else if comp.TokenType == css.FunctionToken {
prevSep = false
j := i + 1
level := 0
for ; j < len(components); j++ {
if components[j].TokenType == css.LeftParenthesisToken {
level++
} else if components[j].TokenType == css.RightParenthesisToken {
if level == 0 {
j++
break
}
level--
}
}
values = append(values, Token{components[i].TokenType, components[i].Data, components[i:j]})
i = j - 1
} else {
prevSep = false
values = append(values, Token{components[i].TokenType, components[i].Data, nil})
}
}
c.valuesBuffer = values
// Do not process complex values (e.g. those containing blocks, or values that do not alternate between whitespace/commas and flat values)
if !simple {
if prop == css.Filter && len(components) == 11 {
if bytes.Equal(components[0].Data, []byte("progid")) &&
components[1].TokenType == css.ColonToken &&
bytes.Equal(components[2].Data, []byte("DXImageTransform")) &&
components[3].Data[0] == '.' &&
bytes.Equal(components[4].Data, []byte("Microsoft")) &&
components[5].Data[0] == '.' &&
bytes.Equal(components[6].Data, []byte("Alpha(")) &&
bytes.Equal(parse.ToLower(components[7].Data), []byte("opacity")) &&
components[8].Data[0] == '=' &&
components[10].Data[0] == ')' {
components = components[6:]
components[0].Data = []byte("alpha(")
}
}
for _, component := range components {
if _, err := c.w.Write(component.Data); err != nil {
return err
}
}
if important {
if _, err := c.w.Write([]byte("!important")); err != nil {
return err
}
}
return nil
}
for i := range values {
values[i].TokenType, values[i].Data = c.shortenToken(prop, values[i].TokenType, values[i].Data)
}
if len(values) > 0 {
switch prop {
case css.Font, css.Font_Weight, css.Font_Family:
if prop == css.Font {
// in "font:" shorthand all values before the size have "normal"
// as valid and, at the same time, default value, so just skip them
for i, value := range values { for i, value := range values {
if inProgid { if !(value.TokenType == css.IdentToken && css.ToHash(value.Data) == css.Normal) {
if value.TokenType == css.FunctionToken { values = values[i:]
inProgid = false break
} }
continue
} else if value.TokenType == css.IdentToken && css.ToHash(value.Data) == css.Progid {
inProgid = true
continue
} }
value.TokenType, value.Data = c.shortenToken(prop, value.TokenType, value.Data) }
if prop == css.Font || prop == css.Font_Family || prop == css.Font_Weight { for i, value := range values {
if value.TokenType == css.IdentToken && (prop == css.Font || prop == css.Font_Weight) { if value.TokenType == css.IdentToken {
val := css.ToHash(value.Data) val := css.ToHash(value.Data)
if val == css.Normal && prop == css.Font_Weight { if prop == css.Font_Weight && val == css.Normal {
// normal could also be specified for font-variant, not just font-weight values[i].TokenType = css.NumberToken
value.TokenType = css.NumberToken values[i].Data = []byte("400")
value.Data = []byte("400")
} else if val == css.Bold { } else if val == css.Bold {
value.TokenType = css.NumberToken values[i].TokenType = css.NumberToken
value.Data = []byte("700") values[i].Data = []byte("700")
} }
} else if value.TokenType == css.StringToken && (prop == css.Font || prop == css.Font_Family) && len(value.Data) > 2 { } else if value.TokenType == css.StringToken && len(value.Data) > 2 {
unquote := true unquote := true
parse.ToLower(value.Data) parse.ToLower(value.Data)
s := value.Data[1 : len(value.Data)-1] s := value.Data[1 : len(value.Data)-1]
@ -260,94 +379,91 @@ func (c *cssMinifier) minifyDeclaration(property []byte, values []css.Token) err
} }
} }
if unquote { if unquote {
value.Data = s values[i].Data = s
} }
} }
} else if prop == css.Outline || prop == css.Border || prop == css.Border_Bottom || prop == css.Border_Left || prop == css.Border_Right || prop == css.Border_Top { }
if css.ToHash(value.Data) == css.None { case css.Margin, css.Padding, css.Border_Width:
value.TokenType = css.NumberToken n := len(values)
value.Data = zeroBytes if n == 2 {
if bytes.Equal(values[0].Data, values[1].Data) {
values = values[:1]
}
} else if n == 3 {
if bytes.Equal(values[0].Data, values[1].Data) && bytes.Equal(values[0].Data, values[2].Data) {
values = values[:1]
} else if bytes.Equal(values[0].Data, values[2].Data) {
values = values[:2]
}
} else if n == 4 {
if bytes.Equal(values[0].Data, values[1].Data) && bytes.Equal(values[0].Data, values[2].Data) && bytes.Equal(values[0].Data, values[3].Data) {
values = values[:1]
} else if bytes.Equal(values[0].Data, values[2].Data) && bytes.Equal(values[1].Data, values[3].Data) {
values = values[:2]
} else if bytes.Equal(values[1].Data, values[3].Data) {
values = values[:3]
} }
} }
values[i].TokenType, values[i].Data = value.TokenType, value.Data case css.Outline, css.Border, css.Border_Bottom, css.Border_Left, css.Border_Right, css.Border_Top:
none := false
iZero := -1
for i, value := range values {
if len(value.Data) == 1 && value.Data[0] == '0' {
iZero = i
} else if css.ToHash(value.Data) == css.None {
values[i].TokenType = css.NumberToken
values[i].Data = zeroBytes
none = true
} }
important := false
if len(values) > 2 && values[len(values)-2].TokenType == css.DelimToken && values[len(values)-2].Data[0] == '!' && css.ToHash(values[len(values)-1].Data) == css.Important {
values = values[:len(values)-2]
important = true
} }
if none && iZero != -1 {
if len(values) == 1 { values = append(values[:iZero], values[iZero+1:]...)
if prop == css.Background && css.ToHash(values[0].Data) == css.None { }
case css.Background:
ident := css.ToHash(values[0].Data)
if len(values) == 1 && (ident == css.None || ident == css.Transparent) {
values[0].Data = backgroundNoneBytes values[0].Data = backgroundNoneBytes
} else if bytes.Equal(property, msfilterBytes) { }
case css.Box_Shadow:
if len(values) == 4 && len(values[0].Data) == 1 && values[0].Data[0] == '0' && len(values[1].Data) == 1 && values[1].Data[0] == '0' && len(values[2].Data) == 1 && values[2].Data[0] == '0' && len(values[3].Data) == 1 && values[3].Data[0] == '0' {
values = values[:2]
}
default:
if bytes.Equal(property, msfilterBytes) {
alpha := []byte("progid:DXImageTransform.Microsoft.Alpha(Opacity=") alpha := []byte("progid:DXImageTransform.Microsoft.Alpha(Opacity=")
if values[0].TokenType == css.StringToken && bytes.HasPrefix(values[0].Data[1:len(values[0].Data)-1], alpha) { if values[0].TokenType == css.StringToken && bytes.HasPrefix(values[0].Data[1:len(values[0].Data)-1], alpha) {
values[0].Data = append(append([]byte{values[0].Data[0]}, []byte("alpha(opacity=")...), values[0].Data[1+len(alpha):]...) values[0].Data = append(append([]byte{values[0].Data[0]}, []byte("alpha(opacity=")...), values[0].Data[1+len(alpha):]...)
} }
} }
} else {
if prop == css.Margin || prop == css.Padding || prop == css.Border_Width {
if (values[0].TokenType == css.NumberToken || values[0].TokenType == css.DimensionToken || values[0].TokenType == css.PercentageToken) && (len(values)+1)%2 == 0 {
valid := true
for i := 1; i < len(values); i += 2 {
if values[i].TokenType != css.WhitespaceToken || values[i+1].TokenType != css.NumberToken && values[i+1].TokenType != css.DimensionToken && values[i+1].TokenType != css.PercentageToken {
valid = false
break
}
}
if valid {
n := (len(values) + 1) / 2
if n == 2 {
if bytes.Equal(values[0].Data, values[2].Data) {
values = values[:1]
}
} else if n == 3 {
if bytes.Equal(values[0].Data, values[2].Data) && bytes.Equal(values[0].Data, values[4].Data) {
values = values[:1]
} else if bytes.Equal(values[0].Data, values[4].Data) {
values = values[:3]
}
} else if n == 4 {
if bytes.Equal(values[0].Data, values[2].Data) && bytes.Equal(values[0].Data, values[4].Data) && bytes.Equal(values[0].Data, values[6].Data) {
values = values[:1]
} else if bytes.Equal(values[0].Data, values[4].Data) && bytes.Equal(values[2].Data, values[6].Data) {
values = values[:3]
} else if bytes.Equal(values[2].Data, values[6].Data) {
values = values[:5]
}
}
}
}
} else if prop == css.Filter && len(values) == 11 {
if bytes.Equal(values[0].Data, []byte("progid")) &&
values[1].TokenType == css.ColonToken &&
bytes.Equal(values[2].Data, []byte("DXImageTransform")) &&
values[3].Data[0] == '.' &&
bytes.Equal(values[4].Data, []byte("Microsoft")) &&
values[5].Data[0] == '.' &&
bytes.Equal(values[6].Data, []byte("Alpha(")) &&
bytes.Equal(parse.ToLower(values[7].Data), []byte("opacity")) &&
values[8].Data[0] == '=' &&
values[10].Data[0] == ')' {
values = values[6:]
values[0].Data = []byte("alpha(")
}
} }
} }
for i := 0; i < len(values); i++ { prevComma := true
if values[i].TokenType == css.FunctionToken { for _, value := range values {
n, err := c.minifyFunction(values[i:]) if !prevComma && value.TokenType != css.CommaToken {
if _, err := c.w.Write([]byte(" ")); err != nil {
return err
}
}
if value.TokenType == css.FunctionToken {
err := c.minifyFunction(value.Components)
if err != nil { if err != nil {
return err return err
} }
i += n - 1 } else {
} else if _, err := c.w.Write(values[i].Data); err != nil { if _, err := c.w.Write(value.Data); err != nil {
return err return err
} }
} }
if value.TokenType == css.CommaToken {
prevComma = true
} else {
prevComma = false
}
}
if important { if important {
if _, err := c.w.Write([]byte("!important")); err != nil { if _, err := c.w.Write([]byte("!important")); err != nil {
return err return err
@ -356,22 +472,22 @@ func (c *cssMinifier) minifyDeclaration(property []byte, values []css.Token) err
return nil return nil
} }
func (c *cssMinifier) minifyFunction(values []css.Token) (int, error) { func (c *cssMinifier) minifyFunction(values []css.Token) error {
n := 1 n := len(values)
if n > 2 {
simple := true simple := true
for i, value := range values[1:] { for i, value := range values[1 : n-1] {
if value.TokenType == css.RightParenthesisToken {
n++
break
}
if i%2 == 0 && (value.TokenType != css.NumberToken && value.TokenType != css.PercentageToken) || (i%2 == 1 && value.TokenType != css.CommaToken) { if i%2 == 0 && (value.TokenType != css.NumberToken && value.TokenType != css.PercentageToken) || (i%2 == 1 && value.TokenType != css.CommaToken) {
simple = false simple = false
} }
n++
} }
values = values[:n]
if simple && (n-1)%2 == 0 { if simple && n%2 == 1 {
fun := css.ToHash(values[0].Data[:len(values[0].Data)-1]) fun := css.ToHash(values[0].Data[0 : len(values[0].Data)-1])
for i := 1; i < n; i += 2 {
values[i].TokenType, values[i].Data = c.shortenToken(0, values[i].TokenType, values[i].Data)
}
nArgs := (n - 1) / 2 nArgs := (n - 1) / 2
if (fun == css.Rgba || fun == css.Hsla) && nArgs == 4 { if (fun == css.Rgba || fun == css.Hsla) && nArgs == 4 {
d, _ := strconv.ParseFloat(string(values[7].Data), 32) // can never fail because if simple == true then this is a NumberToken or PercentageToken
@ -425,7 +541,7 @@ func (c *cssMinifier) minifyFunction(values []css.Token) (int, error) {
parse.ToLower(val) parse.ToLower(val)
if s, ok := ShortenColorHex[string(val)]; ok { if s, ok := ShortenColorHex[string(val)]; ok {
if _, err := c.w.Write(s); err != nil { if _, err := c.w.Write(s); err != nil {
return 0, err return err
} }
} else { } else {
if len(val) == 7 && val[1] == val[2] && val[3] == val[4] && val[5] == val[6] { if len(val) == 7 && val[1] == val[2] && val[3] == val[4] && val[5] == val[6] {
@ -434,10 +550,10 @@ func (c *cssMinifier) minifyFunction(values []css.Token) (int, error) {
val = val[:4] val = val[:4]
} }
if _, err := c.w.Write(val); err != nil { if _, err := c.w.Write(val); err != nil {
return 0, err return err
} }
} }
return n, nil return nil
} }
} else if fun == css.Hsl && nArgs == 3 { } else if fun == css.Hsl && nArgs == 3 {
if values[1].TokenType == css.NumberToken && values[3].TokenType == css.PercentageToken && values[5].TokenType == css.PercentageToken { if values[1].TokenType == css.NumberToken && values[3].TokenType == css.PercentageToken && values[5].TokenType == css.PercentageToken {
@ -453,7 +569,7 @@ func (c *cssMinifier) minifyFunction(values []css.Token) (int, error) {
parse.ToLower(val) parse.ToLower(val)
if s, ok := ShortenColorHex[string(val)]; ok { if s, ok := ShortenColorHex[string(val)]; ok {
if _, err := c.w.Write(s); err != nil { if _, err := c.w.Write(s); err != nil {
return 0, err return err
} }
} else { } else {
if len(val) == 7 && val[1] == val[2] && val[3] == val[4] && val[5] == val[6] { if len(val) == 7 && val[1] == val[2] && val[3] == val[4] && val[5] == val[6] {
@ -462,20 +578,22 @@ func (c *cssMinifier) minifyFunction(values []css.Token) (int, error) {
val = val[:4] val = val[:4]
} }
if _, err := c.w.Write(val); err != nil { if _, err := c.w.Write(val); err != nil {
return 0, err return err
} }
} }
return n, nil return nil
} }
} }
} }
} }
}
for _, value := range values { for _, value := range values {
if _, err := c.w.Write(value.Data); err != nil { if _, err := c.w.Write(value.Data); err != nil {
return 0, err return err
} }
} }
return n, nil return nil
} }
func (c *cssMinifier) shortenToken(prop css.Hash, tt css.TokenType, data []byte) (css.TokenType, []byte) { func (c *cssMinifier) shortenToken(prop css.Hash, tt css.TokenType, data []byte) (css.TokenType, []byte) {
@ -491,11 +609,15 @@ func (c *cssMinifier) shortenToken(prop css.Hash, tt css.TokenType, data []byte)
} }
dim := data[n:] dim := data[n:]
parse.ToLower(dim) parse.ToLower(dim)
if !c.o.KeepCSS2 {
data = minify.Number(data[:n], c.o.Decimals) data = minify.Number(data[:n], c.o.Decimals)
if tt == css.PercentageToken && (len(data) != 1 || data[0] != '0' || prop == css.Color) { } else {
data = append(data, '%') data = minify.Decimal(data[:n], c.o.Decimals) // don't use exponents
} else if tt == css.DimensionToken && (len(data) != 1 || data[0] != '0' || requiredDimension[string(dim)]) { }
if tt == css.DimensionToken && (len(data) != 1 || data[0] != '0' || !optionalZeroDimension[string(dim)] || prop == css.Flex) {
data = append(data, dim...) data = append(data, dim...)
} else if tt == css.PercentageToken {
data = append(data, '%') // TODO: drop percentage for properties that accept <percentage> and <length>
} }
} else if tt == css.IdentToken { } else if tt == css.IdentToken {
//parse.ToLower(data) // TODO: not all identifiers are case-insensitive; all <custom-ident> properties are case-sensitive //parse.ToLower(data) // TODO: not all identifiers are case-insensitive; all <custom-ident> properties are case-sensitive
@ -541,7 +663,7 @@ func (c *cssMinifier) shortenToken(prop css.Hash, tt css.TokenType, data []byte)
} else if tt == css.URLToken { } else if tt == css.URLToken {
parse.ToLower(data[:3]) parse.ToLower(data[:3])
if len(data) > 10 { if len(data) > 10 {
uri := data[4 : len(data)-1] uri := parse.TrimWhitespace(data[4 : len(data)-1])
delim := byte('"') delim := byte('"')
if uri[0] == '\'' || uri[0] == '"' { if uri[0] == '\'' || uri[0] == '"' {
delim = uri[0] delim = uri[0]
View file
@ -23,6 +23,8 @@ func TestCSS(t *testing.T) {
{".cla[id ^= L] { x:y; }", ".cla[id^=L]{x:y}"}, {".cla[id ^= L] { x:y; }", ".cla[id^=L]{x:y}"},
{"area:focus { outline : 0;}", "area:focus{outline:0}"}, {"area:focus { outline : 0;}", "area:focus{outline:0}"},
{"@import 'file';", "@import 'file'"}, {"@import 'file';", "@import 'file'"},
{"@import url('file');", "@import 'file'"},
{"@import url(//url);", `@import "//url"`},
{"@font-face { x:y; }", "@font-face{x:y}"}, {"@font-face { x:y; }", "@font-face{x:y}"},
{"input[type=\"radio\"]{x:y}", "input[type=radio]{x:y}"}, {"input[type=\"radio\"]{x:y}", "input[type=radio]{x:y}"},
@ -51,6 +53,7 @@ func TestCSS(t *testing.T) {
// go-fuzz // go-fuzz
{"input[type=\"\x00\"] { a: b\n}.a{}", "input[type=\"\x00\"]{a:b}.a{}"}, {"input[type=\"\x00\"] { a: b\n}.a{}", "input[type=\"\x00\"]{a:b}.a{}"},
{"a{a:)'''", "a{a:)'''}"}, {"a{a:)'''", "a{a:)'''}"},
{"{T:l(", "{t:l(}"},
} }
m := minify.New() m := minify.New()
@ -91,11 +94,17 @@ func TestCSSInline(t *testing.T) {
{"color: hsla(1,2%,3%,1);", "color:#080807"}, {"color: hsla(1,2%,3%,1);", "color:#080807"},
{"color: hsla(1,2%,3%,0);", "color:transparent"}, {"color: hsla(1,2%,3%,0);", "color:transparent"},
{"color: hsl(48,100%,50%);", "color:#fc0"}, {"color: hsl(48,100%,50%);", "color:#fc0"},
{"background: hsla(0,0%,100%,.7);", "background:hsla(0,0%,100%,.7)"},
{"font-weight: bold; font-weight: normal;", "font-weight:700;font-weight:400"}, {"font-weight: bold; font-weight: normal;", "font-weight:700;font-weight:400"},
{"font: bold \"Times new Roman\",\"Sans-Serif\";", "font:700 times new roman,\"sans-serif\""}, {"font: bold \"Times new Roman\",\"Sans-Serif\";", "font:700 times new roman,\"sans-serif\""},
{"font: normal normal normal normal 20px normal", "font:20px normal"},
{"outline: none;", "outline:0"}, {"outline: none;", "outline:0"},
{"outline: solid black 0;", "outline:solid #000 0"},
{"outline: none black 5px;", "outline:0 #000 5px"},
{"outline: none !important;", "outline:0!important"}, {"outline: none !important;", "outline:0!important"},
{"border-left: none;", "border-left:0"}, {"border-left: none;", "border-left:0"},
{"border-left: none 0;", "border-left:0"},
{"border-left: 0 dashed red;", "border-left:0 dashed red"},
{"margin: 1 1 1 1;", "margin:1"}, {"margin: 1 1 1 1;", "margin:1"},
{"margin: 1 2 1 2;", "margin:1 2"}, {"margin: 1 2 1 2;", "margin:1 2"},
{"margin: 1 2 3 2;", "margin:1 2 3"}, {"margin: 1 2 3 2;", "margin:1 2 3"},
@ -106,11 +115,13 @@ func TestCSSInline(t *testing.T) {
{"margin: 0em;", "margin:0"}, {"margin: 0em;", "margin:0"},
{"font-family:'Arial', 'Times New Roman';", "font-family:arial,times new roman"}, {"font-family:'Arial', 'Times New Roman';", "font-family:arial,times new roman"},
{"background:url('http://domain.com/image.png');", "background:url(http://domain.com/image.png)"}, {"background:url('http://domain.com/image.png');", "background:url(http://domain.com/image.png)"},
{"background:url( 'http://domain.com/image.png' );", "background:url(http://domain.com/image.png)"},
{"filter: progid : DXImageTransform.Microsoft.BasicImage(rotation=1);", "filter:progid:DXImageTransform.Microsoft.BasicImage(rotation=1)"}, {"filter: progid : DXImageTransform.Microsoft.BasicImage(rotation=1);", "filter:progid:DXImageTransform.Microsoft.BasicImage(rotation=1)"},
{"filter: progid:DXImageTransform.Microsoft.Alpha(Opacity=0);", "filter:alpha(opacity=0)"}, {"filter: progid:DXImageTransform.Microsoft.Alpha(Opacity=0);", "filter:alpha(opacity=0)"},
{"content: \"a\\\nb\";", "content:\"ab\""}, {"content: \"a\\\nb\";", "content:\"ab\""},
{"content: \"a\\\r\nb\\\r\nc\";", "content:\"abc\""}, {"content: \"a\\\r\nb\\\r\nc\";", "content:\"abc\""},
{"content: \"\";", "content:\"\""}, {"content: \"\";", "content:\"\""},
{"x: white , white", "x:#fff,#fff"},
{"font:27px/13px arial,sans-serif", "font:27px/13px arial,sans-serif"}, {"font:27px/13px arial,sans-serif", "font:27px/13px arial,sans-serif"},
{"text-decoration: none !important", "text-decoration:none!important"}, {"text-decoration: none !important", "text-decoration:none!important"},
@ -139,9 +150,15 @@ func TestCSSInline(t *testing.T) {
{"margin:0 0 18px 0;", "margin:0 0 18px"}, {"margin:0 0 18px 0;", "margin:0 0 18px"},
{"background:none", "background:0 0"}, {"background:none", "background:0 0"},
{"background:none 1 1", "background:none 1 1"}, {"background:none 1 1", "background:none 1 1"},
{"background:transparent", "background:0 0"},
{"background:transparent no-repeat", "background:transparent no-repeat"},
{"z-index:1000", "z-index:1000"}, {"z-index:1000", "z-index:1000"},
{"box-shadow:0 0 0 0", "box-shadow:0 0"},
{"flex:0px", "flex:0px"},
{"any:0deg 0s 0ms 0dpi 0dpcm 0dppx 0hz 0khz", "any:0 0s 0ms 0dpi 0dpcm 0dppx 0hz 0khz"}, {"any:0deg 0s 0ms 0dpi 0dpcm 0dppx 0hz 0khz", "any:0 0s 0ms 0dpi 0dpcm 0dppx 0hz 0khz"},
{"width:calc(0%-0px)", "width:calc(0%-0px)"},
{"border-left:0 none", "border-left:0"},
{"--custom-variable:0px;", "--custom-variable:0px"}, {"--custom-variable:0px;", "--custom-variable:0px"},
{"--foo: if(x > 5) this.width = 10", "--foo: if(x > 5) this.width = 10"}, {"--foo: if(x > 5) this.width = 10", "--foo: if(x > 5) this.width = 10"},
{"--foo: ;", "--foo: "}, {"--foo: ;", "--foo: "},
@ -156,7 +173,7 @@ func TestCSSInline(t *testing.T) {
{"margin: 1 1 1;", "margin:1"}, {"margin: 1 1 1;", "margin:1"},
{"margin: 1 2 1;", "margin:1 2"}, {"margin: 1 2 1;", "margin:1 2"},
{"margin: 1 2 3;", "margin:1 2 3"}, {"margin: 1 2 3;", "margin:1 2 3"},
{"margin: 0%;", "margin:0"}, // {"margin: 0%;", "margin:0"},
{"color: rgb(255,64,64);", "color:#ff4040"}, {"color: rgb(255,64,64);", "color:#ff4040"},
{"color: rgb(256,-34,2342435);", "color:#f0f"}, {"color: rgb(256,-34,2342435);", "color:#f0f"},
{"color: rgb(120%,-45%,234234234%);", "color:#f0f"}, {"color: rgb(120%,-45%,234234234%);", "color:#f0f"},
@ -181,6 +198,27 @@ func TestCSSInline(t *testing.T) {
} }
} }
func TestCSSKeepCSS2(t *testing.T) {
tests := []struct {
css string
expected string
}{
{`margin:5000em`, `margin:5000em`},
}
m := minify.New()
params := map[string]string{"inline": "1"}
cssMinifier := &Minifier{Decimals: -1, KeepCSS2: true}
for _, tt := range tests {
t.Run(tt.css, func(t *testing.T) {
r := bytes.NewBufferString(tt.css)
w := &bytes.Buffer{}
err := cssMinifier.Minify(m, w, r, params)
test.Minify(t, tt.css, err, w.String(), tt.expected)
})
}
}
func TestReaderErrors(t *testing.T) { func TestReaderErrors(t *testing.T) {
r := test.NewErrorReader(0) r := test.NewErrorReader(0)
w := &bytes.Buffer{} w := &bytes.Buffer{}
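For reference, a minimal sketch (not part of the commit) of driving the options exercised by TestCSSKeepCSS2 above; it mirrors the test's direct call into css.Minifier, so the expected output is the one asserted there.

package main

import (
    "bytes"
    "fmt"

    "github.com/tdewolff/minify"
    "github.com/tdewolff/minify/css"
)

func main() {
    m := minify.New()
    // "inline" marks the input as a declaration list (style attribute)
    // rather than a full stylesheet, exactly as in the tests above.
    params := map[string]string{"inline": "1"}

    // Decimals: -1 keeps full precision; KeepCSS2 suppresses CSS3-only
    // rewrites, so the 5000em value from the test is left untouched.
    cssMinifier := &css.Minifier{Decimals: -1, KeepCSS2: true}

    var out bytes.Buffer
    if err := cssMinifier.Minify(m, &out, bytes.NewBufferString("margin: 5000em;"), params); err != nil {
        panic(err)
    }
    fmt.Println(out.String()) // margin:5000em
}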
@ -2,14 +2,26 @@ package css
import "github.com/tdewolff/parse/css" import "github.com/tdewolff/parse/css"
var requiredDimension = map[string]bool{ var optionalZeroDimension = map[string]bool{
"s": true, "px": true,
"ms": true, "mm": true,
"dpi": true, "q": true,
"dpcm": true, "cm": true,
"dppx": true, "in": true,
"hz": true, "pt": true,
"khz": true, "pc": true,
"ch": true,
"em": true,
"ex": true,
"rem": true,
"vh": true,
"vw": true,
"vmin": true,
"vmax": true,
"deg": true,
"grad": true,
"rad": true,
"turn": true,
} }
// Uses http://www.w3.org/TR/2010/PR-css3-color-20101028/ for colors // Uses http://www.w3.org/TR/2010/PR-css3-color-20101028/ for colors
@ -80,10 +80,10 @@ func (o *Minifier) Minify(m *minify.M, w io.Writer, r io.Reader, _ map[string]st
return err return err
} }
case html.CommentToken: case html.CommentToken:
if o.KeepConditionalComments && len(t.Text) > 6 && (bytes.HasPrefix(t.Text, []byte("[if ")) || bytes.Equal(t.Text, []byte("[endif]"))) { if o.KeepConditionalComments && len(t.Text) > 6 && (bytes.HasPrefix(t.Text, []byte("[if ")) || bytes.Equal(t.Text, []byte("[endif]")) || bytes.Equal(t.Text, []byte("<![endif]"))) {
// [if ...] is always 7 or more characters, [endif] is only encountered for downlevel-revealed // [if ...] is always 7 or more characters, [endif] is only encountered for downlevel-revealed
// see https://msdn.microsoft.com/en-us/library/ms537512(v=vs.85).aspx#syntax // see https://msdn.microsoft.com/en-us/library/ms537512(v=vs.85).aspx#syntax
if bytes.HasPrefix(t.Data, []byte("<!--[if ")) { // downlevel-hidden if bytes.HasPrefix(t.Data, []byte("<!--[if ")) && len(t.Data) > len("<!--[if ]><![endif]-->") { // downlevel-hidden
begin := bytes.IndexByte(t.Data, '>') + 1 begin := bytes.IndexByte(t.Data, '>') + 1
end := len(t.Data) - len("<![endif]-->") end := len(t.Data) - len("<![endif]-->")
if _, err := w.Write(t.Data[:begin]); err != nil { if _, err := w.Write(t.Data[:begin]); err != nil {
@ -95,7 +95,7 @@ func (o *Minifier) Minify(m *minify.M, w io.Writer, r io.Reader, _ map[string]st
if _, err := w.Write(t.Data[end:]); err != nil { if _, err := w.Write(t.Data[end:]); err != nil {
return err return err
} }
} else if _, err := w.Write(t.Data); err != nil { // downlevel-revealed } else if _, err := w.Write(t.Data); err != nil { // downlevel-revealed or short downlevel-hidden
return err return err
} }
} }
@ -281,13 +281,16 @@ func (o *Minifier) Minify(m *minify.M, w io.Writer, r io.Reader, _ map[string]st
attrs := tb.Attributes(html.Content, html.Http_Equiv, html.Charset, html.Name) attrs := tb.Attributes(html.Content, html.Http_Equiv, html.Charset, html.Name)
if content := attrs[0]; content != nil { if content := attrs[0]; content != nil {
if httpEquiv := attrs[1]; httpEquiv != nil { if httpEquiv := attrs[1]; httpEquiv != nil {
content.AttrVal = minify.ContentType(content.AttrVal) if charset := attrs[2]; charset == nil && parse.EqualFold(httpEquiv.AttrVal, []byte("content-type")) {
if charset := attrs[2]; charset == nil && parse.EqualFold(httpEquiv.AttrVal, []byte("content-type")) && bytes.Equal(content.AttrVal, []byte("text/html;charset=utf-8")) { content.AttrVal = minify.Mediatype(content.AttrVal)
if bytes.Equal(content.AttrVal, []byte("text/html;charset=utf-8")) {
httpEquiv.Text = nil httpEquiv.Text = nil
content.Text = []byte("charset") content.Text = []byte("charset")
content.Hash = html.Charset content.Hash = html.Charset
content.AttrVal = []byte("utf-8") content.AttrVal = []byte("utf-8")
}
} else if parse.EqualFold(httpEquiv.AttrVal, []byte("content-style-type")) { } else if parse.EqualFold(httpEquiv.AttrVal, []byte("content-style-type")) {
content.AttrVal = minify.Mediatype(content.AttrVal)
defaultStyleType, defaultStyleParams = parse.Mediatype(content.AttrVal) defaultStyleType, defaultStyleParams = parse.Mediatype(content.AttrVal)
if defaultStyleParams != nil { if defaultStyleParams != nil {
defaultInlineStyleParams = defaultStyleParams defaultInlineStyleParams = defaultStyleParams
@ -296,6 +299,7 @@ func (o *Minifier) Minify(m *minify.M, w io.Writer, r io.Reader, _ map[string]st
defaultInlineStyleParams = map[string]string{"inline": "1"} defaultInlineStyleParams = map[string]string{"inline": "1"}
} }
} else if parse.EqualFold(httpEquiv.AttrVal, []byte("content-script-type")) { } else if parse.EqualFold(httpEquiv.AttrVal, []byte("content-script-type")) {
content.AttrVal = minify.Mediatype(content.AttrVal)
defaultScriptType, defaultScriptParams = parse.Mediatype(content.AttrVal) defaultScriptType, defaultScriptParams = parse.Mediatype(content.AttrVal)
} }
} }
@ -365,7 +369,7 @@ func (o *Minifier) Minify(m *minify.M, w io.Writer, r io.Reader, _ map[string]st
if attr.Traits&caselessAttr != 0 { if attr.Traits&caselessAttr != 0 {
val = parse.ToLower(val) val = parse.ToLower(val)
if attr.Hash == html.Enctype || attr.Hash == html.Codetype || attr.Hash == html.Accept || attr.Hash == html.Type && (t.Hash == html.A || t.Hash == html.Link || t.Hash == html.Object || t.Hash == html.Param || t.Hash == html.Script || t.Hash == html.Style || t.Hash == html.Source) { if attr.Hash == html.Enctype || attr.Hash == html.Codetype || attr.Hash == html.Accept || attr.Hash == html.Type && (t.Hash == html.A || t.Hash == html.Link || t.Hash == html.Object || t.Hash == html.Param || t.Hash == html.Script || t.Hash == html.Style || t.Hash == html.Source) {
val = minify.ContentType(val) val = minify.Mediatype(val)
} }
} }
if rawTagHash != 0 && attr.Hash == html.Type { if rawTagHash != 0 && attr.Hash == html.Type {
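A short usage sketch (not part of the commit) of what the reworked meta handling above means in practice: only an http-equiv=content-type meta whose content minifies to text/html;charset=utf-8 is collapsed into a charset attribute, while other http-equiv values keep their content verbatim. The registration call is an assumption; the expected output is taken from the test cases below.

package main

import (
    "fmt"

    "github.com/tdewolff/minify"
    "github.com/tdewolff/minify/html"
)

func main() {
    m := minify.New()
    // Assumed registration of the default HTML minifier for text/html.
    m.Add("text/html", &html.Minifier{})

    in := []byte(`<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />`)
    out, err := m.Bytes("text/html", in)
    if err != nil {
        panic(err)
    }
    fmt.Println(string(out)) // per the tests: <meta charset=utf-8>
}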
@ -32,6 +32,7 @@ func TestHTML(t *testing.T) {
{`<html><head></head><body>x</body></html>`, `x`}, {`<html><head></head><body>x</body></html>`, `x`},
{`<meta http-equiv="content-type" content="text/html; charset=utf-8">`, `<meta charset=utf-8>`}, {`<meta http-equiv="content-type" content="text/html; charset=utf-8">`, `<meta charset=utf-8>`},
{`<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />`, `<meta charset=utf-8>`}, {`<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />`, `<meta charset=utf-8>`},
{`<meta http-equiv="Content-Security-Policy" content="default-src 'self'; img-src https://*; child-src 'none';">`, `<meta http-equiv=content-security-policy content="default-src 'self'; img-src https://*; child-src 'none';">`},
{`<meta name="keywords" content="a, b">`, `<meta name=keywords content=a,b>`}, {`<meta name="keywords" content="a, b">`, `<meta name=keywords content=a,b>`},
{`<meta name="viewport" content="width = 996" />`, `<meta name=viewport content="width=996">`}, {`<meta name="viewport" content="width = 996" />`, `<meta name=viewport content="width=996">`},
{`<span attr="test"></span>`, `<span attr=test></span>`}, {`<span attr="test"></span>`, `<span attr=test></span>`},
@ -178,6 +179,7 @@ func TestHTMLKeepConditionalComments(t *testing.T) {
}{ }{
{`<!--[if IE 6]> <b> </b> <![endif]-->`, `<!--[if IE 6]><b></b><![endif]-->`}, {`<!--[if IE 6]> <b> </b> <![endif]-->`, `<!--[if IE 6]><b></b><![endif]-->`},
{`<![if IE 6]> <b> </b> <![endif]>`, `<![if IE 6]><b></b><![endif]>`}, {`<![if IE 6]> <b> </b> <![endif]>`, `<![if IE 6]><b></b><![endif]>`},
{`<!--[if !mso]><!--> <b> </b> <!--<![endif]-->`, `<!--[if !mso]><!--><b></b><!--<![endif]-->`},
} }
m := minify.New() m := minify.New()
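The added case covers the `<!--[if !mso]><!-->` opener that the comment handling above now recognises as a conditional comment. A minimal sketch (the registration call is an assumption; the expected output is the one asserted in the test):

package main

import (
    "fmt"

    "github.com/tdewolff/minify"
    "github.com/tdewolff/minify/html"
)

func main() {
    m := minify.New()
    m.Add("text/html", &html.Minifier{KeepConditionalComments: true})

    in := []byte(`<!--[if !mso]><!--> <b> </b> <!--<![endif]-->`)
    out, err := m.Bytes("text/html", in)
    if err != nil {
        panic(err)
    }
    fmt.Println(string(out)) // <!--[if !mso]><!--><b></b><!--<![endif]-->
}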
@ -124,7 +124,6 @@ var attrMap = map[html.Hash]traits{
html.Defer: booleanAttr, html.Defer: booleanAttr,
html.Dir: caselessAttr, html.Dir: caselessAttr,
html.Disabled: booleanAttr, html.Disabled: booleanAttr,
html.Draggable: booleanAttr,
html.Enabled: booleanAttr, html.Enabled: booleanAttr,
html.Enctype: caselessAttr, html.Enctype: caselessAttr,
html.Face: caselessAttr, html.Face: caselessAttr,
@ -48,29 +48,38 @@ func (o *Minifier) Minify(_ *minify.M, w io.Writer, r io.Reader, _ map[string]st
lineTerminatorQueued = true lineTerminatorQueued = true
} else if tt == js.WhitespaceToken { } else if tt == js.WhitespaceToken {
whitespaceQueued = true whitespaceQueued = true
} else if tt == js.CommentToken { } else if tt == js.SingleLineCommentToken || tt == js.MultiLineCommentToken {
if len(data) > 5 && data[1] == '*' && data[2] == '!' { if len(data) > 5 && data[1] == '*' && data[2] == '!' {
if _, err := w.Write(data[:3]); err != nil { if _, err := w.Write(data[:3]); err != nil {
return err return err
} }
comment := parse.TrimWhitespace(parse.ReplaceMultipleWhitespace(data[3 : len(data)-2])) comment := parse.ReplaceMultipleWhitespace(data[3 : len(data)-2])
if tt != js.MultiLineCommentToken {
// don't trim newlines in multiline comments as that might change ASI
// (we could do a more expensive check post-factum but it's not worth it)
comment = parse.TrimWhitespace(comment)
}
if _, err := w.Write(comment); err != nil { if _, err := w.Write(comment); err != nil {
return err return err
} }
if _, err := w.Write(data[len(data)-2:]); err != nil { if _, err := w.Write(data[len(data)-2:]); err != nil {
return err return err
} }
} else if tt == js.MultiLineCommentToken {
lineTerminatorQueued = true
} else {
whitespaceQueued = true
} }
} else { } else {
first := data[0] first := data[0]
if (prev == js.IdentifierToken || prev == js.NumericToken || prev == js.PunctuatorToken || prev == js.StringToken || prev == js.RegexpToken) && if (prev == js.IdentifierToken || prev == js.NumericToken || prev == js.PunctuatorToken || prev == js.StringToken || prev == js.TemplateToken || prev == js.RegexpToken) &&
(tt == js.IdentifierToken || tt == js.NumericToken || tt == js.StringToken || tt == js.PunctuatorToken || tt == js.RegexpToken) { (tt == js.IdentifierToken || tt == js.NumericToken || tt == js.StringToken || tt == js.TemplateToken || tt == js.PunctuatorToken || tt == js.RegexpToken) {
if lineTerminatorQueued && (prev != js.PunctuatorToken || prevLast == '}' || prevLast == ']' || prevLast == ')' || prevLast == '+' || prevLast == '-' || prevLast == '"' || prevLast == '\'') && if lineTerminatorQueued && (prev != js.PunctuatorToken || prevLast == '}' || prevLast == ']' || prevLast == ')' || prevLast == '+' || prevLast == '-' || prevLast == '"' || prevLast == '\'') &&
(tt != js.PunctuatorToken || first == '{' || first == '[' || first == '(' || first == '+' || first == '-' || first == '!' || first == '~') { (tt != js.PunctuatorToken || first == '{' || first == '[' || first == '(' || first == '+' || first == '-' || first == '!' || first == '~') {
if _, err := w.Write(newlineBytes); err != nil { if _, err := w.Write(newlineBytes); err != nil {
return err return err
} }
} else if whitespaceQueued && (prev != js.StringToken && prev != js.PunctuatorToken && tt != js.PunctuatorToken || (prevLast == '+' || prevLast == '-') && first == prevLast) { } else if whitespaceQueued && (prev != js.StringToken && prev != js.PunctuatorToken && tt != js.PunctuatorToken || (prevLast == '+' || prevLast == '-' || prevLast == '/') && first == prevLast) {
if _, err := w.Write(spaceBytes); err != nil { if _, err := w.Write(spaceBytes); err != nil {
return err return err
} }
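The reworked comment handling above keeps only `/*! ... */` comments, stops trimming line terminators inside them (the newline can matter for automatic semicolon insertion), treats an ordinary block comment containing a newline as a line terminator rather than a space, and adds '/' to the characters that force a separating space. A small sketch (not part of the commit) with expected results taken from the #183/#185 test cases just below:

package main

import (
    "fmt"

    "github.com/tdewolff/minify"
    "github.com/tdewolff/minify/js"
)

func main() {
    m := minify.New()
    m.AddFunc("text/javascript", js.Minify)

    inputs := []string{
        "f()/*com\nment*/g()",  // plain comment with a newline -> "f()\ng()"
        "f()/*!com\nment*/g()", // important comment is kept verbatim
        "x+/**/++y",            // -> "x+ ++y", the space keeps + and ++ apart
    }
    for _, in := range inputs {
        out, err := m.Bytes("text/javascript", []byte(in))
        if err != nil {
            panic(err)
        }
        fmt.Printf("%q -> %q\n", in, out)
    }
}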
@ -40,6 +40,17 @@ func TestJS(t *testing.T) {
{"false\n\"string\"", "false\n\"string\""}, // #109 {"false\n\"string\"", "false\n\"string\""}, // #109
{"`\n", "`"}, // go fuzz {"`\n", "`"}, // go fuzz
{"a\n~b", "a\n~b"}, // #132 {"a\n~b", "a\n~b"}, // #132
{"x / /\\d+/.exec(s)[0]", "x/ /\\d+/.exec(s)[0]"}, // #183
{"function(){}\n`string`", "function(){}\n`string`"}, // #181
{"false\n`string`", "false\n`string`"}, // #181
{"`string`\nwhatever()", "`string`\nwhatever()"}, // #181
{"x+/**/++y", "x+ ++y"}, // #185
{"x+\n++y", "x+\n++y"}, // #185
{"f()/*!com\nment*/g()", "f()/*!com\nment*/g()"}, // #185
{"f()/*com\nment*/g()", "f()\ng()"}, // #185
{"f()/*!\n*/g()", "f()/*!\n*/g()"}, // #185
// go-fuzz // go-fuzz
{`/\`, `/\`}, {`/\`, `/\`},
@ -33,6 +33,8 @@ func NewPathData(o *Minifier) *PathData {
} }
} }
// ShortenPathData takes a full pathdata string and returns a shortened version. The original string is overwritten.
// It parses all commands (M, A, Z, ...) and coordinates (numbers) and calls copyInstruction for each command.
func (p *PathData) ShortenPathData(b []byte) []byte { func (p *PathData) ShortenPathData(b []byte) []byte {
var x0, y0 float64 var x0, y0 float64
var cmd byte var cmd byte
@ -74,6 +76,8 @@ func (p *PathData) ShortenPathData(b []byte) []byte {
return b[:j] return b[:j]
} }
// copyInstruction copies pathdata of a single command, but may be comprised of multiple sets for that command. For example, L takes two coordinates, but this function may process 2*N coordinates. Lowercase commands are relative commands, where the coordinates are relative to the previous point. Uppercase commands have absolute coordinates.
// We update p.x and p.y (the current coordinates) according to the commands given. For each set of coordinates we call shortenCurPosInstruction and shortenAltPosInstruction. The former just minifies the coordinates, the latter will inverse the lowercase/uppercase of the command, and see if the coordinates get smaller due to that. The shortest is chosen and copied to `b`.
func (p *PathData) copyInstruction(b []byte, cmd byte) int { func (p *PathData) copyInstruction(b []byte, cmd byte) int {
n := len(p.coords) n := len(p.coords)
if n == 0 { if n == 0 {
@ -191,6 +195,7 @@ func (p *PathData) copyInstruction(b []byte, cmd byte) int {
return j return j
} }
// shortenCurPosInstruction only minifies the coordinates.
func (p *PathData) shortenCurPosInstruction(cmd byte, coords [][]byte) PathDataState { func (p *PathData) shortenCurPosInstruction(cmd byte, coords [][]byte) PathDataState {
state := p.state state := p.state
p.curBuffer = p.curBuffer[:0] p.curBuffer = p.curBuffer[:0]
@ -202,7 +207,8 @@ func (p *PathData) shortenCurPosInstruction(cmd byte, coords [][]byte) PathDataS
} }
for i, coord := range coords { for i, coord := range coords {
isFlag := false isFlag := false
if (cmd == 'A' || cmd == 'a') && (i%7 == 3 || i%7 == 4) { // Arc has boolean flags that can only be 0 or 1. Setting isFlag prevents adding a dot before a zero (instead of a space). However, when the dot was already there, the command is malformed and could make the path longer than before, introducing bugs.
if (cmd == 'A' || cmd == 'a') && (i%7 == 3 || i%7 == 4) && coord[0] != '.' {
isFlag = true isFlag = true
} }
@ -212,6 +218,7 @@ func (p *PathData) shortenCurPosInstruction(cmd byte, coords [][]byte) PathDataS
return state return state
} }
// shortenAltPosInstruction toggles the command between absolute / relative coordinates and minifies the coordinates.
func (p *PathData) shortenAltPosInstruction(cmd byte, coordFloats []float64, x, y float64) PathDataState { func (p *PathData) shortenAltPosInstruction(cmd byte, coordFloats []float64, x, y float64) PathDataState {
state := p.state state := p.state
p.altBuffer = p.altBuffer[:0] p.altBuffer = p.altBuffer[:0]
@ -250,6 +257,7 @@ func (p *PathData) shortenAltPosInstruction(cmd byte, coordFloats []float64, x,
return state return state
} }
// copyNumber will copy a number to the destination buffer, taking into account space or dot insertion to guarantee the shortest pathdata.
func (state *PathDataState) copyNumber(buffer *[]byte, coord []byte, isFlag bool) { func (state *PathDataState) copyNumber(buffer *[]byte, coord []byte, isFlag bool) {
if state.prevDigit && (coord[0] >= '0' && coord[0] <= '9' || coord[0] == '.' && state.prevDigitIsInt) { if state.prevDigit && (coord[0] >= '0' && coord[0] <= '9' || coord[0] == '.' && state.prevDigitIsInt) {
if coord[0] == '0' && !state.prevDigitIsInt { if coord[0] == '0' && !state.prevDigitIsInt {
@ -28,8 +28,9 @@ func TestPathData(t *testing.T) {
{"M.0.1", "M0 .1"}, {"M.0.1", "M0 .1"},
{"M200.0.1", "M2e2.1"}, {"M200.0.1", "M2e2.1"},
{"M0 0a3.28 3.28.0.0.0 3.279 3.28", "M0 0a3.28 3.28.0 0 0 3.279 3.28"}, // #114 {"M0 0a3.28 3.28.0.0.0 3.279 3.28", "M0 0a3.28 3.28.0.0.0 3.279 3.28"}, // #114
{"A1.1.0.0.0.0.2.3", "A1.1.0.0 0 0 .2."}, // bad input (sweep and large-arc are not booleans) gives bad output {"A1.1.0.0.0.0.2.3", "A1.1.0.0.0.0.2.3"}, // bad input (sweep and large-arc are not booleans) gives bad output
{"A.0.0.4.0.0.0.3", "A0 0 .4.0.0.0.3"}, // bad input, keep dot for booleans
// fuzz // fuzz
{"", ""}, {"", ""},
@ -37,7 +38,11 @@ func TestPathData(t *testing.T) {
{".8.00c0", ""}, {".8.00c0", ""},
{".1.04h0e6.0e6.0e0.0", "h0 0 0 0"}, {".1.04h0e6.0e6.0e0.0", "h0 0 0 0"},
{"M.1.0.0.2Z", "M.1.0.0.2z"}, {"M.1.0.0.2Z", "M.1.0.0.2z"},
{"A.0.0.0.0.3.2e3.7.0.0.0.0.0.1.3.0.0.0.0.2.3.2.0.0.0.0.20.2e-10.0.0.0.0.0.0.0.0", "A0 0 0 0 .3 2e2.7.0.0.0 0 0 .1.3 30 0 0 0 .2.3.2 3 20 0 0 .2 2e-1100 11 0 0 0 "}, // bad input (sweep and large-arc are not booleans) gives bad output {"A.0.0.0.0.3.2e3.7.0.0.0.0.0.1.3.0.0.0.0.2.3.2.0.0.0.0.20.2e-10.0.0.0.0.0.0.0.0", "A0 0 0 0 .3 2e2.7.0.0.0.0.0.1.3.0.0.0.0.2.3.2.0.0.0.0.2 2e-11.0.0.0.0.0.0.0.0"}, // bad input (sweep and large-arc are not booleans) gives bad output
{
"A.0.0.4.0.0.0.3.0.0.0.0.0.4.2.0.0.0.0.2.0.4.0.0.0.4.2.8.2.0.0.0.2.9.28.0.0.0.0.0.2.3.0.0.0.0.0.0.2.3.2.09e-03.0.0.0.0.8.0.0.0.0.0.0.0",
"A0 0 .4.0.0.0.3.0.0.0.0.0.4.2.0.0.0.0.2.0.4.0.0.0.4.2.8.2.0.0.0.2.9.28.0.0.0.0.0.2.3.0.0.0.0.0.0.2.3.2 9e-5.0.0.0.0.8.0.0.0.0.0.0.0",
},
} }
p := NewPathData(&Minifier{Decimals: -1}) p := NewPathData(&Minifier{Decimals: -1})
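The new doc comments above summarise the algorithm: each command's coordinates are minified twice, once as written and once with the absolute/relative form of the command inverted, and the shorter rendering is copied back into the input buffer. A minimal usage sketch mirroring the test setup (note that ShortenPathData rewrites its argument in place, so a copy is passed):

package main

import (
    "fmt"

    "github.com/tdewolff/minify/svg"
)

func main() {
    p := svg.NewPathData(&svg.Minifier{Decimals: -1})

    in := []byte("M200.0.1")
    out := p.ShortenPathData(append([]byte(nil), in...)) // work on a copy
    fmt.Printf("%s -> %s\n", in, out)                    // M200.0.1 -> M2e2.1
}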
@ -51,7 +51,6 @@ func (o *Minifier) Minify(m *minify.M, w io.Writer, r io.Reader, _ map[string]st
p := NewPathData(o) p := NewPathData(o)
minifyBuffer := buffer.NewWriter(make([]byte, 0, 64)) minifyBuffer := buffer.NewWriter(make([]byte, 0, 64))
attrByteBuffer := make([]byte, 0, 64) attrByteBuffer := make([]byte, 0, 64)
gStack := make([]bool, 0)
l := xml.NewLexer(r) l := xml.NewLexer(r)
defer l.Restore() defer l.Restore()
@ -59,7 +58,6 @@ func (o *Minifier) Minify(m *minify.M, w io.Writer, r io.Reader, _ map[string]st
tb := NewTokenBuffer(l) tb := NewTokenBuffer(l)
for { for {
t := *tb.Shift() t := *tb.Shift()
SWITCH:
switch t.TokenType { switch t.TokenType {
case xml.ErrorToken: case xml.ErrorToken:
if l.Err() == io.EOF { if l.Err() == io.EOF {
@ -113,29 +111,7 @@ func (o *Minifier) Minify(m *minify.M, w io.Writer, r io.Reader, _ map[string]st
} }
case xml.StartTagToken: case xml.StartTagToken:
tag = t.Hash tag = t.Hash
if containerTagMap[tag] { // skip empty containers if tag == svg.Metadata {
i := 0
for {
next := tb.Peek(i)
i++
if next.TokenType == xml.EndTagToken && next.Hash == tag || next.TokenType == xml.StartTagCloseVoidToken || next.TokenType == xml.ErrorToken {
for j := 0; j < i; j++ {
tb.Shift()
}
break SWITCH
} else if next.TokenType != xml.AttributeToken && next.TokenType != xml.StartTagCloseToken {
break
}
}
if tag == svg.G {
if tb.Peek(0).TokenType == xml.StartTagCloseToken {
gStack = append(gStack, false)
tb.Shift()
break
}
gStack = append(gStack, true)
}
} else if tag == svg.Metadata {
skipTag(tb, tag) skipTag(tb, tag)
break break
} else if tag == svg.Line { } else if tag == svg.Line {
@ -184,7 +160,7 @@ func (o *Minifier) Minify(m *minify.M, w io.Writer, r io.Reader, _ map[string]st
} }
if tag == svg.Svg && attr == svg.ContentStyleType { if tag == svg.Svg && attr == svg.ContentStyleType {
val = minify.ContentType(val) val = minify.Mediatype(val)
defaultStyleType = val defaultStyleType = val
} else if attr == svg.Style { } else if attr == svg.Style {
minifyBuffer.Reset() minifyBuffer.Reset()
@ -266,13 +242,6 @@ func (o *Minifier) Minify(m *minify.M, w io.Writer, r io.Reader, _ map[string]st
} }
case xml.EndTagToken: case xml.EndTagToken:
tag = 0 tag = 0
if t.Hash == svg.G && len(gStack) > 0 {
if !gStack[len(gStack)-1] {
gStack = gStack[:len(gStack)-1]
break
}
gStack = gStack[:len(gStack)-1]
}
if len(t.Data) > 3+len(t.Text) { if len(t.Data) > 3+len(t.Text) {
t.Data[2+len(t.Text)] = '>' t.Data[2+len(t.Text)] = '>'
t.Data = t.Data[:3+len(t.Text)] t.Data = t.Data[:3+len(t.Text)]
@ -41,9 +41,9 @@ func TestSVG(t *testing.T) {
{`<path d="M20 20l-10-10z"/>`, `<path d="M20 20 10 10z"/>`}, {`<path d="M20 20l-10-10z"/>`, `<path d="M20 20 10 10z"/>`},
{`<?xml version="1.0" encoding="utf-8"?>`, ``}, {`<?xml version="1.0" encoding="utf-8"?>`, ``},
{`<svg viewbox="0 0 16 16"><path/></svg>`, `<svg viewbox="0 0 16 16"><path/></svg>`}, {`<svg viewbox="0 0 16 16"><path/></svg>`, `<svg viewbox="0 0 16 16"><path/></svg>`},
{`<g></g>`, ``}, {`<g></g>`, `<g/>`},
{`<g><path/></g>`, `<path/>`}, {`<g><path/></g>`, `<g><path/></g>`},
{`<g id="a"><g><path/></g></g>`, `<g id="a"><path/></g>`}, {`<g id="a"><g><path/></g></g>`, `<g id="a"><g><path/></g></g>`},
{`<path fill="#ffffff"/>`, `<path fill="#fff"/>`}, {`<path fill="#ffffff"/>`, `<path fill="#fff"/>`},
{`<path fill="#fff"/>`, `<path fill="#fff"/>`}, {`<path fill="#fff"/>`, `<path fill="#fff"/>`},
{`<path fill="white"/>`, `<path fill="#fff"/>`}, {`<path fill="white"/>`, `<path fill="#fff"/>`},
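With containerTagMap and the gStack bookkeeping removed above, empty or single-child groups are no longer collapsed, which is what the adjusted expectations reflect. A sketch of the end-to-end behaviour (the image/svg+xml registration is an assumption; the output combines the group and colour cases asserted above):

package main

import (
    "fmt"

    "github.com/tdewolff/minify"
    "github.com/tdewolff/minify/svg"
)

func main() {
    m := minify.New()
    m.AddFunc("image/svg+xml", svg.Minify)

    in := []byte(`<g id="a"><g><path fill="#ffffff"/></g></g>`)
    out, err := m.Bytes("image/svg+xml", in)
    if err != nil {
        panic(err)
    }
    // Nested groups are kept; only the colour is shortened:
    // <g id="a"><g><path fill="#fff"/></g></g>
    fmt.Println(string(out))
}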
@ -2,18 +2,6 @@ package svg // import "github.com/tdewolff/minify/svg"
import "github.com/tdewolff/parse/svg" import "github.com/tdewolff/parse/svg"
var containerTagMap = map[svg.Hash]bool{
svg.A: true,
svg.Defs: true,
svg.G: true,
svg.Marker: true,
svg.Mask: true,
svg.Missing_Glyph: true,
svg.Pattern: true,
svg.Switch: true,
svg.Symbol: true,
}
var colorAttrMap = map[svg.Hash]bool{ var colorAttrMap = map[svg.Hash]bool{
svg.Color: true, svg.Color: true,
svg.Fill: true, svg.Fill: true,
File diff suppressed because it is too large
@ -70,6 +70,10 @@ type Token struct {
Data []byte Data []byte
} }
func (t Token) String() string {
return t.TokenType.String() + "('" + string(t.Data) + "')"
}
// Parser is the state for the parser. // Parser is the state for the parser.
type Parser struct { type Parser struct {
l *Lexer l *Lexer
@ -23,7 +23,8 @@ const (
UnknownToken // extra token when no token can be matched UnknownToken // extra token when no token can be matched
WhitespaceToken // space \t \v \f WhitespaceToken // space \t \v \f
LineTerminatorToken // \r \n \r\n LineTerminatorToken // \r \n \r\n
CommentToken SingleLineCommentToken
MultiLineCommentToken // token for comments with line terminators (not just any /*block*/)
IdentifierToken IdentifierToken
PunctuatorToken /* { } ( ) [ ] . ; , < > <= >= == != === !== + - * % ++ -- << >> PunctuatorToken /* { } ( ) [ ] . ; , < > <= >= == != === !== + - * % ++ -- << >>
>>> & | ^ ! ~ && || ? : = += -= *= %= <<= >>= >>>= &= |= ^= / /= >= */ >>> & | ^ ! ~ && || ? : = += -= *= %= <<= >>= >>>= &= |= ^= / /= >= */
@ -68,8 +69,10 @@ func (tt TokenType) String() string {
return "Whitespace" return "Whitespace"
case LineTerminatorToken: case LineTerminatorToken:
return "LineTerminator" return "LineTerminator"
case CommentToken: case SingleLineCommentToken:
return "Comment" return "SingleLineComment"
case MultiLineCommentToken:
return "MultiLineComment"
case IdentifierToken: case IdentifierToken:
return "Identifier" return "Identifier"
case PunctuatorToken: case PunctuatorToken:
@ -174,15 +177,15 @@ func (l *Lexer) Next() (TokenType, []byte) {
l.r.Move(1) l.r.Move(1)
tt = PunctuatorToken tt = PunctuatorToken
case '<', '>', '=', '!', '+', '-', '*', '%', '&', '|', '^': case '<', '>', '=', '!', '+', '-', '*', '%', '&', '|', '^':
if (c == '<' || (l.emptyLine && c == '-')) && l.consumeCommentToken() { if l.consumeHTMLLikeCommentToken() {
return CommentToken, l.r.Shift() return SingleLineCommentToken, l.r.Shift()
} else if l.consumeLongPunctuatorToken() { } else if l.consumeLongPunctuatorToken() {
l.state = ExprState l.state = ExprState
tt = PunctuatorToken tt = PunctuatorToken
} }
case '/': case '/':
if l.consumeCommentToken() { if tt = l.consumeCommentToken(); tt != UnknownToken {
return CommentToken, l.r.Shift() return tt, l.r.Shift()
} else if l.state == ExprState && l.consumeRegexpToken() { } else if l.state == ExprState && l.consumeRegexpToken() {
l.state = SubscriptState l.state = SubscriptState
tt = RegexpToken tt = RegexpToken
@ -374,46 +377,54 @@ func (l *Lexer) consumeSingleLineComment() {
//////////////////////////////////////////////////////////////// ////////////////////////////////////////////////////////////////
func (l *Lexer) consumeCommentToken() bool { func (l *Lexer) consumeHTMLLikeCommentToken() bool {
c := l.r.Peek(0)
if c == '<' && l.r.Peek(1) == '!' && l.r.Peek(2) == '-' && l.r.Peek(3) == '-' {
// opening HTML-style single line comment
l.r.Move(4)
l.consumeSingleLineComment()
return true
} else if l.emptyLine && c == '-' && l.r.Peek(1) == '-' && l.r.Peek(2) == '>' {
// closing HTML-style single line comment
// (only if current line didn't contain any meaningful tokens)
l.r.Move(3)
l.consumeSingleLineComment()
return true
}
return false
}
func (l *Lexer) consumeCommentToken() TokenType {
c := l.r.Peek(0) c := l.r.Peek(0)
if c == '/' { if c == '/' {
c = l.r.Peek(1) c = l.r.Peek(1)
if c == '/' { if c == '/' {
// single line // single line comment
l.r.Move(2) l.r.Move(2)
l.consumeSingleLineComment() l.consumeSingleLineComment()
return SingleLineCommentToken
} else if c == '*' { } else if c == '*' {
// multi line // block comment (potentially multiline)
tt := SingleLineCommentToken
l.r.Move(2) l.r.Move(2)
for { for {
c := l.r.Peek(0) c := l.r.Peek(0)
if c == '*' && l.r.Peek(1) == '/' { if c == '*' && l.r.Peek(1) == '/' {
l.r.Move(2) l.r.Move(2)
return true break
} else if c == 0 { } else if c == 0 {
break break
} else if l.consumeLineTerminator() { } else if l.consumeLineTerminator() {
tt = MultiLineCommentToken
l.emptyLine = true l.emptyLine = true
} else { } else {
l.r.Move(1) l.r.Move(1)
} }
} }
} else { return tt
return false
} }
} else if c == '<' && l.r.Peek(1) == '!' && l.r.Peek(2) == '-' && l.r.Peek(3) == '-' {
// opening HTML-style single line comment
l.r.Move(4)
l.consumeSingleLineComment()
} else if c == '-' && l.r.Peek(1) == '-' && l.r.Peek(2) == '>' {
// closing HTML-style single line comment
// (only if current line didn't contain any meaningful tokens)
l.r.Move(3)
l.consumeSingleLineComment()
} else {
return false
} }
return true return UnknownToken
} }
func (l *Lexer) consumeLongPunctuatorToken() bool { func (l *Lexer) consumeLongPunctuatorToken() bool {
@ -643,6 +654,12 @@ func (l *Lexer) consumeTemplateToken() bool {
l.state = ExprState l.state = ExprState
l.r.Move(2) l.r.Move(2)
return true return true
} else if c == '\\' {
l.r.Move(1)
if c := l.r.Peek(0); c != 0 {
l.r.Move(1)
}
continue
} else if c == 0 { } else if c == 0 {
l.r.Rewind(mark) l.r.Rewind(mark)
return false return false
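The lexer changes above split the old CommentToken into SingleLineCommentToken and MultiLineCommentToken (a block comment only counts as multi-line when it actually contains a line terminator), move the HTML-like `<!--` / `-->` comments into their own helper, and make template literals skip escaped characters. A small sketch that prints the token stream; NewLexer and ErrorToken are assumed from the package's public API, while Next is the method shown in this diff:

package main

import (
    "bytes"
    "fmt"

    "github.com/tdewolff/parse/js"
)

func main() {
    src := "x+/**/++y /*multi\nline*/ //tail"
    l := js.NewLexer(bytes.NewBufferString(src))
    for {
        tt, data := l.Next()
        if tt == js.ErrorToken { // end of input or read error
            break
        }
        // Block comments now come back as SingleLineComment or
        // MultiLineComment depending on whether they span a line.
        fmt.Printf("%-18s %q\n", tt, data)
    }
}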
@ -20,7 +20,7 @@ func TestTokens(t *testing.T) {
{"\n\r\r\n\u2028\u2029", TTs{LineTerminatorToken}}, {"\n\r\r\n\u2028\u2029", TTs{LineTerminatorToken}},
{"5.2 .04 0x0F 5e99", TTs{NumericToken, NumericToken, NumericToken, NumericToken}}, {"5.2 .04 0x0F 5e99", TTs{NumericToken, NumericToken, NumericToken, NumericToken}},
{"a = 'string'", TTs{IdentifierToken, PunctuatorToken, StringToken}}, {"a = 'string'", TTs{IdentifierToken, PunctuatorToken, StringToken}},
{"/*comment*/ //comment", TTs{CommentToken, CommentToken}}, {"/*comment*/ //comment", TTs{SingleLineCommentToken, SingleLineCommentToken}},
{"{ } ( ) [ ]", TTs{PunctuatorToken, PunctuatorToken, PunctuatorToken, PunctuatorToken, PunctuatorToken, PunctuatorToken}}, {"{ } ( ) [ ]", TTs{PunctuatorToken, PunctuatorToken, PunctuatorToken, PunctuatorToken, PunctuatorToken, PunctuatorToken}},
{". ; , < > <=", TTs{PunctuatorToken, PunctuatorToken, PunctuatorToken, PunctuatorToken, PunctuatorToken, PunctuatorToken}}, {". ; , < > <=", TTs{PunctuatorToken, PunctuatorToken, PunctuatorToken, PunctuatorToken, PunctuatorToken, PunctuatorToken}},
{">= == != === !==", TTs{PunctuatorToken, PunctuatorToken, PunctuatorToken, PunctuatorToken, PunctuatorToken}}, {">= == != === !==", TTs{PunctuatorToken, PunctuatorToken, PunctuatorToken, PunctuatorToken, PunctuatorToken}},
@ -31,12 +31,12 @@ func TestTokens(t *testing.T) {
{">>= >>>= &= |= ^= =>", TTs{PunctuatorToken, PunctuatorToken, PunctuatorToken, PunctuatorToken, PunctuatorToken, PunctuatorToken}}, {">>= >>>= &= |= ^= =>", TTs{PunctuatorToken, PunctuatorToken, PunctuatorToken, PunctuatorToken, PunctuatorToken, PunctuatorToken}},
{"a = /.*/g;", TTs{IdentifierToken, PunctuatorToken, RegexpToken, PunctuatorToken}}, {"a = /.*/g;", TTs{IdentifierToken, PunctuatorToken, RegexpToken, PunctuatorToken}},
{"/*co\nm\u2028m/*ent*/ //co//mment\u2029//comment", TTs{CommentToken, CommentToken, LineTerminatorToken, CommentToken}}, {"/*co\nm\u2028m/*ent*/ //co//mment\u2029//comment", TTs{MultiLineCommentToken, SingleLineCommentToken, LineTerminatorToken, SingleLineCommentToken}},
{"<!-", TTs{PunctuatorToken, PunctuatorToken, PunctuatorToken}}, {"<!-", TTs{PunctuatorToken, PunctuatorToken, PunctuatorToken}},
{"1<!--2\n", TTs{NumericToken, CommentToken, LineTerminatorToken}}, {"1<!--2\n", TTs{NumericToken, SingleLineCommentToken, LineTerminatorToken}},
{"x=y-->10\n", TTs{IdentifierToken, PunctuatorToken, IdentifierToken, PunctuatorToken, PunctuatorToken, NumericToken, LineTerminatorToken}}, {"x=y-->10\n", TTs{IdentifierToken, PunctuatorToken, IdentifierToken, PunctuatorToken, PunctuatorToken, NumericToken, LineTerminatorToken}},
{" /*comment*/ -->nothing\n", TTs{CommentToken, CommentToken, LineTerminatorToken}}, {" /*comment*/ -->nothing\n", TTs{SingleLineCommentToken, SingleLineCommentToken, LineTerminatorToken}},
{"1 /*comment\nmultiline*/ -->nothing\n", TTs{NumericToken, CommentToken, CommentToken, LineTerminatorToken}}, {"1 /*comment\nmultiline*/ -->nothing\n", TTs{NumericToken, MultiLineCommentToken, SingleLineCommentToken, LineTerminatorToken}},
{"$ _\u200C \\u2000 \u200C", TTs{IdentifierToken, IdentifierToken, IdentifierToken, UnknownToken}}, {"$ _\u200C \\u2000 \u200C", TTs{IdentifierToken, IdentifierToken, IdentifierToken, UnknownToken}},
{">>>=>>>>=", TTs{PunctuatorToken, PunctuatorToken, PunctuatorToken}}, {">>>=>>>>=", TTs{PunctuatorToken, PunctuatorToken, PunctuatorToken}},
{"1/", TTs{NumericToken, PunctuatorToken}}, {"1/", TTs{NumericToken, PunctuatorToken}},
@ -63,7 +63,7 @@ func TestTokens(t *testing.T) {
{"'\n '\u2028", TTs{UnknownToken, LineTerminatorToken, UnknownToken, LineTerminatorToken}}, {"'\n '\u2028", TTs{UnknownToken, LineTerminatorToken, UnknownToken, LineTerminatorToken}},
{"'str\\\U00100000ing\\0'", TTs{StringToken}}, {"'str\\\U00100000ing\\0'", TTs{StringToken}},
{"'strin\\00g'", TTs{StringToken}}, {"'strin\\00g'", TTs{StringToken}},
{"/*comment", TTs{CommentToken}}, {"/*comment", TTs{SingleLineCommentToken}},
{"a=/regexp", TTs{IdentifierToken, PunctuatorToken, RegexpToken}}, {"a=/regexp", TTs{IdentifierToken, PunctuatorToken, RegexpToken}},
{"\\u002", TTs{UnknownToken, IdentifierToken}}, {"\\u002", TTs{UnknownToken, IdentifierToken}},
@ -97,6 +97,9 @@ func TestTokens(t *testing.T) {
{"function f(){}/1/g", TTs{IdentifierToken, IdentifierToken, PunctuatorToken, PunctuatorToken, PunctuatorToken, PunctuatorToken, RegexpToken}}, {"function f(){}/1/g", TTs{IdentifierToken, IdentifierToken, PunctuatorToken, PunctuatorToken, PunctuatorToken, PunctuatorToken, RegexpToken}},
{"this.return/1/g", TTs{IdentifierToken, PunctuatorToken, IdentifierToken, PunctuatorToken, NumericToken, PunctuatorToken, IdentifierToken}}, {"this.return/1/g", TTs{IdentifierToken, PunctuatorToken, IdentifierToken, PunctuatorToken, NumericToken, PunctuatorToken, IdentifierToken}},
{"(a+b)/1/g", TTs{PunctuatorToken, IdentifierToken, PunctuatorToken, IdentifierToken, PunctuatorToken, PunctuatorToken, NumericToken, PunctuatorToken, IdentifierToken}}, {"(a+b)/1/g", TTs{PunctuatorToken, IdentifierToken, PunctuatorToken, IdentifierToken, PunctuatorToken, PunctuatorToken, NumericToken, PunctuatorToken, IdentifierToken}},
{"`\\``", TTs{TemplateToken}},
{"`\\${ 1 }`", TTs{TemplateToken}},
{"`\\\r\n`", TTs{TemplateToken}},
// go fuzz // go fuzz
{"`", TTs{UnknownToken}}, {"`", TTs{UnknownToken}},
@ -1,6 +1,8 @@
package strconv // import "github.com/tdewolff/parse/strconv" package strconv // import "github.com/tdewolff/parse/strconv"
import "math" import (
"math"
)
// Int parses a byte-slice and returns the integer it represents. // Int parses a byte-slice and returns the integer it represents.
// If an invalid character is encountered, it will stop there. // If an invalid character is encountered, it will stop there.
@ -34,6 +36,9 @@ func ParseInt(b []byte) (int64, int) {
func LenInt(i int64) int { func LenInt(i int64) int {
if i < 0 { if i < 0 {
if i == -9223372036854775808 {
return 19
}
i = -i i = -i
} }
switch { switch {
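The new guard covers the one value whose absolute value does not fit in an int64: negating math.MinInt64 wraps back to itself, so the `i = -i` normalisation would leave i negative and the digit-count switch below it would misbehave. -9223372036854775808 simply has 19 digits, as the new test cases below assert. A two-line illustration of the wrap-around (not part of the commit):

package main

import (
    "fmt"
    "math"
)

func main() {
    var i int64 = math.MinInt64
    fmt.Println(-i == i)                    // true: -MinInt64 wraps back to MinInt64
    fmt.Println(len("9223372036854775808")) // 19, the digit count hard-coded above
}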
@ -41,6 +41,8 @@ func TestLenInt(t *testing.T) {
{1, 1}, {1, 1},
{10, 2}, {10, 2},
{99, 2}, {99, 2},
{9223372036854775807, 19},
{-9223372036854775808, 19},
// coverage // coverage
{100, 3}, {100, 3},
83
vendor/github.com/tdewolff/parse/strconv/price.go generated vendored Normal file
@ -0,0 +1,83 @@
package strconv
// AppendPrice will append an int64 formatted as a price, where the int64 is the price in cents.
// It does not display whether a price is negative or not.
func AppendPrice(b []byte, price int64, dec bool, milSeparator byte, decSeparator byte) []byte {
if price < 0 {
if price == -9223372036854775808 {
x := []byte("92 233 720 368 547 758 08")
x[2] = milSeparator
x[6] = milSeparator
x[10] = milSeparator
x[14] = milSeparator
x[18] = milSeparator
x[22] = decSeparator
return append(b, x...)
}
price = -price
}
// rounding
if !dec {
firstDec := (price / 10) % 10
if firstDec >= 5 {
price += 100
}
}
// calculate size
n := LenInt(price) - 2
if n > 0 {
n += (n - 1) / 3 // mil separator
} else {
n = 1
}
if dec {
n += 2 + 1 // decimals + dec separator
}
// resize byte slice
i := len(b)
if i+n > cap(b) {
b = append(b, make([]byte, n)...)
} else {
b = b[:i+n]
}
// print fractional-part
i += n - 1
if dec {
for j := 0; j < 2; j++ {
c := byte(price%10) + '0'
price /= 10
b[i] = c
i--
}
b[i] = decSeparator
i--
} else {
price /= 100
}
if price == 0 {
b[i] = '0'
return b
}
// print integer-part
j := 0
for price > 0 {
if j == 3 {
b[i] = milSeparator
i--
j = 0
}
c := byte(price%10) + '0'
price /= 10
b[i] = c
i--
j++
}
return b
}
29
vendor/github.com/tdewolff/parse/strconv/price_test.go generated vendored Normal file
@ -0,0 +1,29 @@
package strconv // import "github.com/tdewolff/parse/strconv"
import (
"testing"
"github.com/tdewolff/test"
)
func TestAppendPrice(t *testing.T) {
priceTests := []struct {
price int64
dec bool
expected string
}{
{0, false, "0"},
{0, true, "0.00"},
{100, true, "1.00"},
{-100, true, "1.00"},
{100000, false, "1,000"},
{100000, true, "1,000.00"},
{123456789012, true, "1,234,567,890.12"},
{9223372036854775807, true, "92,233,720,368,547,758.07"},
{-9223372036854775808, true, "92,233,720,368,547,758.08"},
}
for _, tt := range priceTests {
price := AppendPrice([]byte{}, tt.price, tt.dec, ',', '.')
test.String(t, string(price), tt.expected, "for", tt.price)
}
}
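The cases above all use ',' as the thousands separator and '.' for decimals; the two separator arguments exist so other locales can be produced directly. A usage sketch (the European-style output is an assumption obtained by swapping the separators in the test expectation):

package main

import (
    "fmt"

    "github.com/tdewolff/parse/strconv"
)

func main() {
    // 123456789012 cents, with decimals, '.' between thousands groups and
    // ',' before the two decimal digits.
    b := strconv.AppendPrice(nil, 123456789012, true, '.', ',')
    fmt.Println(string(b)) // 1.234.567.890,12
}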