mirror of
https://gh.wpcy.net/https://github.com/discourse/discourse.git
synced 2026-05-17 11:48:52 +08:00
Refactors the use of the buffered-content mixin to native getters on the dependent classes. This mixin previously provided a cached wrapper around an instance of BufferedProxy and added 2 convenience methods aliasing BufferedProxy methods. ### Main changes: * Use of the `@cached` decorator to maintain parity with the previous version of `this.buffered` to make sure we only refresh the buffered proxy if the dependent property changes. * _Not entirely sure if @cached + @dependentKeyCompat is more performant than just using `@computed`_ * Use of the `@dependentKeyCompat` decorator to ensure backwards compatibility of the getter with computed properties - we will leave refactoring of those somewhere down the road as that would greatly increase the scope of this PR * `applyChanges` / `discardChanges` are the same as `applyBufferedChanges` / `discardBufferedChanges` for BufferedProxy
61 lines
1.6 KiB
JavaScript
61 lines
1.6 KiB
JavaScript
import { cached, tracked } from "@glimmer/tracking";
|
|
import Controller from "@ember/controller";
|
|
import { action } from "@ember/object";
|
|
import { dependentKeyCompat } from "@ember/object/compat";
|
|
import { not } from "@ember/object/computed";
|
|
import BufferedProxy from "ember-buffered-proxy/proxy";
|
|
import { ajax } from "discourse/lib/ajax";
|
|
import { propertyEqual } from "discourse/lib/computed";
|
|
|
|
export default class AdminCustomizeRobotsTxtController extends Controller {
|
|
@tracked model;
|
|
saved = false;
|
|
isSaving = false;
|
|
|
|
@propertyEqual("model.robots_txt", "buffered.robots_txt") saveDisabled;
|
|
|
|
@not("model.overridden") resetDisabled;
|
|
|
|
@cached
|
|
@dependentKeyCompat
|
|
get buffered() {
|
|
return BufferedProxy.create({
|
|
content: this.model,
|
|
});
|
|
}
|
|
|
|
@action
|
|
save() {
|
|
this.setProperties({
|
|
isSaving: true,
|
|
saved: false,
|
|
});
|
|
|
|
ajax("robots.json", {
|
|
type: "PUT",
|
|
data: { robots_txt: this.buffered.get("robots_txt") },
|
|
})
|
|
.then((data) => {
|
|
this.buffered.applyChanges();
|
|
this.set("saved", true);
|
|
this.set("model.overridden", data.overridden);
|
|
})
|
|
.finally(() => this.set("isSaving", false));
|
|
}
|
|
|
|
@action
|
|
reset() {
|
|
this.setProperties({
|
|
isSaving: true,
|
|
saved: false,
|
|
});
|
|
ajax("robots.json", { type: "DELETE" })
|
|
.then((data) => {
|
|
this.buffered.set("robots_txt", data.robots_txt);
|
|
this.buffered.applyChanges();
|
|
this.set("saved", true);
|
|
this.set("model.overridden", false);
|
|
})
|
|
.finally(() => this.set("isSaving", false));
|
|
}
|
|
}
|