I am trying to perform a transaction where I replace unique position values. When I reinsert the shows, the episodes don't attach even though I have preloaded them with the show and I am using cast_assoc in the changeset. Below are the files:
show_controller.ex
# Controller excerpt: move a show to a new position by deleting and
# re-inserting every show in the affected range inside one transaction.
original_show = Repo.get!(Show, id)
|> Repo.preload(:episodes)
old_position = original_show.position
# Parse the requested position out of the request params.
%{"position" => new_position} = show_params
new_position = String.to_integer(new_position)
# All shows whose position lies in (old_position, new_position].
query = from(s in Show, where: s.position <= ^new_position, where: s.position > ^old_position)
shows = Repo.all(query) |> Repo.preload(:episodes) |> Enum.sort(&(&1.position < &2.position))
result = Repo.transaction(fn ->
# Presumably everything is deleted up front so the re-inserts cannot
# trip the unique constraint on :position mid-shuffle.
Repo.delete!(original_show)
Repo.delete_all(query)
Enum.each shows, fn show ->
show = Map.put(show, :position, show.position - 1)
# NOTE(review): this is why the episodes don't re-attach. The struct is
# passed as the *data* argument and `params` defaults to %{};
# cast/cast_assoc only read params, never data already on the struct,
# so the preloaded episodes are ignored on re-insert. The episode data
# would have to be passed through the params map instead.
changeset = Show.changeset(show)
# NOTE(review): inserting a struct previously loaded from the repo —
# verify its primary key / __meta__ state doesn't conflict on insert.
case Repo.insert(changeset) do
{:ok, show} -> show
{:error, changeset} -> Repo.rollback(changeset)
end
end
# NOTE(review): `new_show_params` is not defined in this excerpt —
# presumably `show_params` with the new position; confirm upstream.
changeset = Show.changeset(original_show, new_show_params)
case Repo.insert(changeset) do
{:ok, show} -> show
{:error, changeset} -> Repo.rollback(changeset)
end
end)
show.ex
# Builds a changeset for a Show.
#
# Casts the scalar fields and the file attachments, casts nested
# episode params via `cast_assoc/2`, then enforces presence of the
# required fields and declares the unique-position DB constraint.
def changeset(struct, params \\ %{}) do
  cs = cast(struct, params, [:title, :position, :shuffle_episodes])
  cs = cast_attachments(cs, params, [:large_artwork_url, :parallax_artwork_url])
  cs = cast_assoc(cs, :episodes)

  cs =
    validate_required(cs, [:title, :position, :large_artwork_url, :parallax_artwork_url, :shuffle_episodes])

  unique_constraint(cs, :position)
end
Related
My Influxdb 2 measurement contains a boolean field. I would like to formulate a query that outputs all rows where the field's value is true. An if condition in a filter line does not work.
In detail: I have workout data that includes the boolean field "isIndoor". I want to visualize all values in my dashboard if that field is not true. (I can rename/filter/map values/fields in later steps.)
from(bucket: "mydata")
|> range(start: v.timeRangeStart, stop: v.timeRangeStop)
|> filter(fn: (r) => if r._boolean == true then
// use all the fields
else
// actually, there shouldn't be an 'else' but Flux insists on it
)
// further treatment and filtering
How can this be accomplished?
New edit: I found out that by pivoting the data I can use a filter:
from(bucket: "mydata")
|> range(start: v.timeRangeStart, stop:v.timeRangeStop)
|> pivot(rowKey:["_time"], columnKey: ["_field"], valueColumn: "_value")
|> filter(fn: (r) => r["isIndoor"] == true)
Regrettably, Grafana no longer recognizes the output as graphable. I am now considering emptying the bucket and refilling it with my data, but using the boolean value as a tag instead.
It's simple. If your condition == true then return true. In else case just return false
|> filter(fn: (r) => if myBool == true then true else false )
UPD:
|> filter(fn: (r) => r.isIndoor == true)
I have a block of code that I want to write in F#, but the examples I have are in C#. I would like some help to write this in the F# language, and help understanding how it works.
Here is the c# code I have to mimic:
builder.HasMany(r => r.Options).WithOne(o => o.Root).HasForeignKey(o => o.RootId).OnDelete(DeleteBehavior.Cascade);
In F#, I am trying to do this:
builder
.HasOne(fun i -> i.ProductionReport)
.WithMany(fun pr -> pr.CostItems)
.HasForeignKey(fun pr -> pr.ProductionReportId).OnDelete(DeleteBehavior.Cascade) |> ignore
And the issue, per visual studio, is that pr is of type obj. How do I make sure f# knows that pr is of type ProductionReport, according to the return type of builder.HasOne.
Here is the complete sample requested:
BackendDemoDbContext
namespace BackendDemo.BackendDemoContext
open Microsoft.EntityFrameworkCore
// EF Core DbContext for the demo backend.
type BackendDemoContext(options: DbContextOptions<BackendDemoContext>) =
inherit DbContext(options)
// NOTE(review): the override body below contains only comments — F#
// requires at least an expression (e.g. `()` or a base call) for this
// to compile; confirm the real file has one.
override __.OnModelCreating modelbuilder =
//Todo:
//modelbuilder.ApplyConfiguration(new CostItemEntityTypeConfiguration());
//modelbuilder.ApplyConfiguration(new ProductionReportEntityTypeConfiguration());
CostItem
namespace BackendDemo.Data.Models
// Entity holding the per-category cost figures of a production report.
// NOTE(review): `member val X = null` has no type annotation, so F#
// cannot infer a nullable type for these auto-properties — confirm the
// real file annotates them (e.g. `: decimal Nullable` or `: string`).
type CostItem() =
member val CostItemId = null with get, set
member val Paper1 = null with get, set
member val Paper2 = null with get, set
member val Cases = null with get, set
member val Boxes = null with get, set
member val Paste = null with get, set
member val Bundling = null with get, set
member val Ink = null with get, set
member val Cardboard = null with get, set
member val Wrapping = null with get, set
member val Labour = null with get, set
member val Fringe = null with get, set
member val Pallet = null with get, set
// Foreign key + navigation property back to the owning ProductionReport.
member val ProductionReportId =null with get,set
member val ProductionReport = null with get, set
ProductionReport
namespace BackendDemo.Data.Models
open System.Collections
open BackendDemo.Data.Models
// Aggregate entity; the CostItems collection (still a Todo below) is the
// inverse side of CostItem.ProductionReport.
type ProductionReport() =
//val keyword necessary for AutoProperties
// NOTE(review): without `with get, set` this is a get-only property
// fixed at 2 — EF Core needs a settable key; confirm intent.
member val ProductionReportId : int = 2
//Todo:
//abstract member CostItems : ICollection<CostItem> with get, set
CostItemEntityTypeConfiguration
namespace BackendDemo.Data.EntityConfigurations
open Microsoft.EntityFrameworkCore
open Microsoft.EntityFrameworkCore.Metadata.Builders
open BackendDemo.Data.Models
// Fluent-API mapping for CostItem.
type CostItemEntityTypeConfiguration =
interface IEntityTypeConfiguration<CostItem> with
override this.Configure(builder: EntityTypeBuilder<CostItem>) =
builder.ToTable("CostItem") |> ignore
builder.HasKey(fun i -> i.CostItemId) |> ignore
builder.Property(fun i -> i.Paper1).IsRequired() |> ignore
builder.Property(fun i -> i.Paper2).IsRequired() |> ignore
builder.Property(fun i -> i.Cases).IsRequired() |> ignore
builder.Property(fun i -> i.Boxes).IsRequired() |> ignore
builder.Property(fun i -> i.Paste).IsRequired() |> ignore
builder.Property(fun i -> i.Bundling).IsRequired() |> ignore
builder.Property(fun i -> i.Ink).IsRequired() |> ignore
builder.Property(fun i -> i.Cardboard).IsRequired() |> ignore
builder.Property(fun i -> i.Wrapping).IsRequired() |> ignore
builder.Property(fun i -> i.Labour).IsRequired() |> ignore
builder.Property(fun i -> i.Fringe).IsRequired() |> ignore
builder.Property(fun i -> i.Pallet).IsRequired() |> ignore
// NOTE(review): this is where the question's error occurs — these
// overloads take Expression<Func<_,_>>, and a plain F# lambda leaves
// `pr` inferred as obj. The accepted fix converts an F# quotation to a
// LINQ Expression (see the toExpr helper discussed in the answers).
builder
.HasOne(fun i -> i.ProductionReport)
.WithMany(fun pr -> pr.CostItems)
.HasForeignKey(fun pr -> pr.ProductionReportId).OnDelete(DeleteBehavior.Cascade) |> ignore
ProductionReportEntityTypeConfiguration
namespace BackendDemo.Data.EntityConfigurations
open Microsoft.EntityFrameworkCore
open Microsoft.EntityFrameworkCore.Metadata.Builders
open BackendDemo.Data.Models
// Fluent-API mapping for ProductionReport (key mapping still a Todo).
type ProductionReportEntityTypeConfiguration =
interface IEntityTypeConfiguration<ProductionReport> with
override this.Configure(builder: EntityTypeBuilder<ProductionReport>) =
builder.ToTable("ProductionReport") |> ignore
//Todo
///builder.HasKey(fun r -> r.ProductionReportId) |> ignore
Here are the results of the suggestions below (thanks by the way!):
1 Try forcing an argument type
builder
.HasOne(fun i -> i.ProductionReport)
.WithMany(fun (pr: ProductionReport) -> pr.CostItems)
Result
2 Use the alternative Function syntax
builder
.HasOne(<# fun i -> i.ProductionReport #>)
.WithMany(<# fun pr -> pr.CostItems #>)
Result
3 Use the <# notation with specific type
builder
.HasOne(<# Func<ProductionReport,_> fun i -> i.ProductionReport #>)
.WithMany(<# Func<CostItem,_> fun pr -> pr.CostItems #>)
Result
4 Factorize the Expression solution from Nathan
static member toExpr (f:'a -> 'b) =
<# Func<_,_> (f) #>
|> LeafExpressionConverter.QuotationToExpression
|> unbox<Expression<Func<'a, 'b>>>
Factorization class
Result
5 Factorize the Expression with type notation suggested by Nathan
static member toExpr<'a, 'b> (f:'a -> 'b) =
<# Func<_,_> (f) #>
|> LeafExpressionConverter.QuotationToExpression
|> unbox<Expression<Func<'a, 'b>>>
Result
I think I got it, but it took some digging to figure out how to work with the expressions. I referenced this post's history to see how to build a System.Linq.Expressions.Expression. Here's what I have:
open System.Linq.Expressions
open Microsoft.FSharp.Linq.RuntimeHelpers
...
let toProdRptExpr : Expression<Func<CostItem, ProductionReport>> =
<# Func<_, _> (fun (i:CostItem) -> i.ProductionReport) #>
|> LeafExpressionConverter.QuotationToExpression
|> unbox<Expression<Func<CostItem, ProductionReport>>>
let toCostItemsExpr : Expression<Func<ProductionReport, seq<CostItem>>> =
<# Func<_,_> (fun (pr:ProductionReport) -> pr.CostItems) #>
|> LeafExpressionConverter.QuotationToExpression
|> unbox<Expression<Func<ProductionReport, seq<CostItem>>>>
let a = builder.HasOne(toProdRptExpr)
let b = a.WithMany(toCostItemsExpr)
that's a lot more verbose than it needs to be, but it helped me figure out how the types fit together.
EDIT
For brevity, you can create a function like
let toExpr (f:'a -> 'b) =
<# Func<_,_> (f) #>
|> LeafExpressionConverter.QuotationToExpression
|> unbox<Expression<Func<'a, 'b>>>
and then use it like
builder
.HasOne(toExpr(fun (i:CostItem) -> i.ProductionReport))
.WithMany(toExpr(fun (pr:ProductionReport) -> pr.CostItems))
But you have to be careful because it looks like CostItem and ProductionReport are mutually referential (see the discussion in comments below). That means they need to be defined in the same file and use the and keyword (see this example)
What is the proper way to pass query string parameters to bs-fetch?
Currently, I have:
Fetch.fetch("https://example.com/api?param1=value1&param2=value2")
Obviously, this is not sustainable for larger parameter lists.
Is there a better way to do this?
re:fetch supports query params by way of either
request("https://example.com/api",
~queryParams=[
("param1", "value1"),
("param2", "value2")
])
|> fetch;
or
request("https://example.com/api")
|> Request.param("param1", "value1")
|> Request.param("param2", "value2")
|> fetch;
Beware that the library is experimental though. Alternatively, you could just swipe the query builder code, which has been battle-tested at least a little bit (there's a subtle bug in #monssef's implementation when there's an empty list, and it also doesn't do proper encoding):
/* Bind the browser's encodeURIComponent so keys and values are
   percent-encoded. (Attribute restored to [@bs.val]; the scrape had
   mangled `@` into `#`.) */
[@bs.val] external encodeURIComponent : string => string = "";

/* Builds `url?k1=v1&k2=v2` from an association list of (key, value)
   pairs, returning the bare url when the list is empty. */
let _buildUrl = (url, params) => {
  let encodeParam = ((key, value)) =>
    encodeURIComponent(key) ++ "=" ++ encodeURIComponent(value);
  let params =
    params |> List.map(encodeParam)
           |> String.joinWith("&");
  /* Empty param list must not produce a trailing "?". */
  switch params {
  | "" => url
  | _ => {j|$url?$params|j}
  };
};
i don't think there's something builtin for that.
just make your own query builder function, something like this
/* Example hand-rolled query builder: folds a Js.Dict of Js.Json values
   into a "?k=v&..." string. (Email literal restored — the scrape had
   mangled `@` into `#`.) */
let payload = Js.Dict.empty();
Js.Dict.set(payload, "email", Js.Json.string("email@email.co"));
Js.Dict.set(payload, "password", Js.Json.string("secret"));

let query =
  Js.Dict.keys(payload)
  |> Array.fold_left(
       (query, key) =>
         switch (Js.Dict.get(payload, key)) {
         | Some(value) =>
           /* NOTE(review): leaves a trailing "&" and does no URL
              encoding — fine as an illustration, not production-ready. */
           query ++ key ++ "=" ++ Js.Json.stringify(value) ++ "&"
         | _ => query
         },
       "?"
     );
here's a link to the playground.
When trying to build form validations, my error shows up in the form when I use a username exceeding 20 characters — but not when I enter nothing. Instead, I get a Postgres.Error view reporting a violated non-null constraint.
# user.ex
# Builds a user changeset.
#
# `cast/3` only whitelists fields — it no longer marks anything as
# required (that was the removed `cast/4` behaviour). Without an
# explicit `validate_required/2`, a blank username sails past the
# changeset and blows up on the database's NOT NULL constraint instead
# of producing a form error, which is exactly the reported symptom.
def changeset(model, params \\ %{}) do
  model
  |> cast(params, ~w(name username)a)
  |> validate_required([:username])
  |> validate_length(:username, min: 1, max: 20)
end
Which probably is because of the migration:
# Migration: creates the users table with a unique, NOT NULL username.
def change do
create table(:users) do
add :name, :string
# NOT NULL at the DB level — an insert with a nil username raises a
# Postgres non-null-constraint error unless the changeset validates
# presence first.
add :username, :string, null: false
add :password_hash, :string
timestamps
end
create unique_index(:users, [:username])
end
Going through the Programming Phoenix book, which is unfortunately getting a little outdated, I can't find a quick solution to this problem.
Somehow the Postgres error shouldn't come before the validation checks. Any idea on how to make this error go away?
If it doesn't work, I assume you're using Phoenix 1.3, so try changing this code
def changeset(model, params \\ %{}) do
model
|> cast(params, ~w(name username), [])
|> validate_length(:username, min: 1, max: 20)
end
With this:
def changeset(model, params \\ %{}) do
model
|> cast(params, ~w(name username))
|> validate_required([:username])
|> validate_length(:username, min: 1, max: 20)
end
You can check more details in documentation of Ecto.Changeset.validate_required/3.
Hope that helps!
I'm having trouble with a custom Ecto type that I'm writing. It is backed by the %Postgrex.Range{} type.
The code is
defmodule Foo.Ecto.DateRange do
  @moduledoc """
  Custom Ecto type mapping a `Date.Range` onto a Postgres `daterange`
  column (a `%Postgrex.Range{}` at the adapter level).
  """
  # Restored from the garbled `#behaviour` — `#` starts a comment, so the
  # behaviour declaration was silently lost.
  @behaviour Ecto.Type

  def type, do: :daterange

  # External input: a map of ISO-8601 strings. Raises on malformed dates
  # (bang variants), surfacing bad seeds/params loudly.
  def cast(%{"lower" => lower, "upper" => upper}) do
    new_lower = Date.from_iso8601!(lower)
    new_upper = Date.from_iso8601!(upper)
    {:ok, Date.range(new_lower, new_upper)}
  end

  def cast(%Date.Range{} = range), do: {:ok, range}
  def cast(_), do: :error

  # NOTE(review): assumes Postgrex hands bounds back as Erlang date
  # tuples ({y, m, d}) matching what dump/1 writes — confirm against the
  # installed Postgrex version.
  def load(%Postgrex.Range{lower: lower, upper: upper}) do
    {:ok, Date.range(Date.from_erl!(lower), Date.from_erl!(upper))}
  end

  def load(_), do: :error

  # Postgrex 0.13 cannot encode %Date{} bounds (the CaseClauseError in
  # Ecto.Adapters.Postgres.TypeModule.encode_value/2); convert the bounds
  # to Erlang tuples before handing the range to the adapter.
  def dump(%Date.Range{} = range) do
    {:ok, %Postgrex.Range{lower: Date.to_erl(range.first), upper: Date.to_erl(range.last)}}
  end

  def dump(_), do: :error
end
The migration is
def change do
create table(:users) do
add :email, :string, null: false
add :username, :string
add :name, :string, null: false
add :password_hash, :text, null: false
add :period, :daterange
timestamps()
end
The user schema is
schema "users" do
field :username, :string
field :name, :string
field :email, :string
field :password_hash, :string
field :password, :string, virtual: true
field :period, Foo.Ecto.DateRange
The problematic code in my seeds.exs is this one:
today = Date.utc_today()
{:ok, user2} = create_user %{name: "Gloubi Boulga",
email: "gloub@boul.ga", password: "xptdr32POD?é23PRK*efz",
period: Date.range(today, Timex.shift(today, months: 2))
}
And finally, the error is this one:
* (CaseClauseError) no case clause matching: {~D[2017-11-04]}
(ecto) lib/ecto/adapters/postgres/datetime.ex:40: Ecto.Adapters.Postgres.TypeModule.encode_value/2
(ecto) /home/tchoutri/dev/Projects/Foo/deps/postgrex/lib/postgrex/type_module.ex:717: Ecto.Adapters.Postgres.TypeModule.encode_params/3
[…]
priv/repo/seeds.exs:33: anonymous fn/0 in :elixir_compiler_1.__FILE__/1
And of course, I do not understand why this kind of conversion is happening, and this is very frustrating, especially considering that creating a custom Ecto type backed by %Postgrex.Range{} should be somewhat trivial.
EDIT: I've put some Logger.debug in the cast function and I can see
[debug] Casting new_date #DateRange<~D[2017-11-11], ~D[2018-01-11]>
appearing and
%Postgrex.Range{lower: ~D[2017-11-11], lower_inclusive: true, upper: ~D[2018-01-11], upper_inclusive: true}
in the dump function.
Within a %Postgrex.Range{}, the current version of Postgrex (0.13.3) expects %Postgrex.Date{}s. See the relevant test here.
However as seen in the link, %Postgrex.Date{} is deprecated in the next release and you are expected to use %Date{} from 0.14 onwards (still in development).
I came across this today. I hope this still helps:
def dump(%Date.Range{} = range) do
{:ok, %Postgrex.Range{lower: Date.to_erl(range.first), upper: Date.to_erl(range.last)}}
end
Here's what I ended up with:
defmodule DateRange do
  @moduledoc false
  # Module attributes restored from the scrape's `@` -> `#` mangling, and
  # mid-token line wraps (e.g. "up\nper") rejoined.
  @behaviour Ecto.Type

  @doc """
  Does use the `:daterange` postgrex type.
  """
  # (Doc previously claimed `:tsrange`, contradicting the return below.)
  def type, do: :daterange

  @doc """
  Can cast various formats:

      # Simple maps (default to `[]` semantic like Date.range)
      %{"lower" => "2015-01-23", "upper" => "2015-01-23"}

      # Postgrex range with Date structs for upper and lower bound
      %Postgrex.Range{lower: #Date<2015-01-23>, upper: #Date<2015-01-23>}
  """
  def cast(%Date.Range{first: lower, last: upper}), do: cast(%{lower: lower, upper: upper})
  def cast(%{"lower" => lower, "upper" => upper}), do: cast(%{lower: lower, upper: upper})
  def cast(%Postgrex.Range{lower: %Date{}, upper: %Date{}} = range), do: {:ok, range}

  def cast(%{lower: %Date{} = lower, upper: %Date{} = upper}) do
    {:ok, %Postgrex.Range{lower: lower, upper: upper}}
  end

  def cast(%{lower: lower, upper: upper}) do
    try do
      # Date.from_iso8601/1 returns {:ok, date} | {:error, reason} —
      # the original matched {:ok, date, 0} (the DateTime shape), which
      # can never match and made every string cast return :error.
      with {:ok, new_lower} <- Date.from_iso8601(lower),
           {:ok, new_upper} <- Date.from_iso8601(upper) do
        {:ok, %Postgrex.Range{lower: new_lower, upper: new_upper}}
      else
        _ -> :error
      end
    rescue
      # Non-binary bounds make from_iso8601 raise; treat as a cast error.
      FunctionClauseError -> :error
    end
  end

  def cast(_), do: :error

  # Sentinel dates standing in for Postgres' unbounded range ends.
  @end_of_times ~D[9999-12-31]
  @start_of_times ~D[0000-01-01]

  # Converts one raw bound (an Erlang date tuple) to a %Date{}, shifting
  # by `offset` days when the bound is exclusive so the resulting
  # Date.range is fully inclusive.
  defp canonicalize_bounds(date, inclusive, offset, infinite_bound) do
    with {:ok, date} <- Date.from_erl(date) do
      case inclusive do
        false -> {:ok, Timex.shift(date, days: offset)}
        true -> {:ok, date}
      end
    else
      # NOTE(review): intended to map a nil (unbounded) end to the
      # sentinel, but Date.from_erl/1 raises on nil rather than
      # returning a value for this clause to match — verify with the
      # installed Elixir version.
      ^inclusive = false when is_nil(date) -> {:ok, infinite_bound}
      _ -> :error
    end
  end

  @doc """
  Does load the postgrex returned range and converts data back to Date structs.
  """
  def load(%Postgrex.Range{lower: lower, lower_inclusive: lower_inclusive,
                           upper: upper, upper_inclusive: upper_inclusive}) do
    with {:ok, lower} <- canonicalize_bounds(lower, lower_inclusive, 1, @start_of_times),
         {:ok, upper} <- canonicalize_bounds(upper, upper_inclusive, -1, @end_of_times) do
      {:ok, Date.range(lower, upper)}
    else
      _ -> :error
    end
  end

  def load(_), do: :error

  @doc """
  Does convert the Date bounds into erl format for the db.
  """
  def dump(%Postgrex.Range{lower: %Date{} = lower, upper: %Date{} = upper} = range) do
    with {:ok, lower} <- Ecto.DataType.dump(lower),
         {:ok, upper} <- Ecto.DataType.dump(upper) do
      {:ok, %{range | lower: lower, upper: upper}}
    else
      _ -> :error
    end
  end

  def dump(_), do: :error
end